diff --git a/sdk/storage/azure-storage-blob/MANIFEST.in b/sdk/storage/azure-storage-blob/MANIFEST.in index fb2f876c4a81..5da368cf0f74 100644 --- a/sdk/storage/azure-storage-blob/MANIFEST.in +++ b/sdk/storage/azure-storage-blob/MANIFEST.in @@ -1,7 +1,7 @@ include *.md -include azure/__init__.py -include azure/storage/__init__.py include LICENSE +include azure/storage/blob/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/storage/blob/py.typed +include azure/__init__.py +include azure/storage/__init__.py \ No newline at end of file diff --git a/sdk/storage/azure-storage-blob/_meta.json b/sdk/storage/azure-storage-blob/_meta.json new file mode 100644 index 000000000000..dab439068bed --- /dev/null +++ b/sdk/storage/azure-storage-blob/_meta.json @@ -0,0 +1,6 @@ +{ + "commit": "9e20a84f2bd17e28282cb7fc4f32c9db2dbe3b3f", + "repository_url": "https://github.com/test-repo-billy/azure-rest-api-specs", + "typespec_src": "specification/storage/Microsoft.BlobStroage", + "@azure-tools/typespec-python": "0.31.1" +} \ No newline at end of file diff --git a/sdk/storage/azure-storage-blob/azure/__init__.py b/sdk/storage/azure-storage-blob/azure/__init__.py index 0d1f7edf5dc6..d55ccad1f573 100644 --- a/sdk/storage/azure-storage-blob/azure/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/__init__.py index 0d1f7edf5dc6..d55ccad1f573 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py index 2386595611bd..9b08ab46a7ef 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py @@ -1,263 +1,26 @@ -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -import os -from typing import Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING +from ._client import BlobClient from ._version import VERSION -from ._blob_client import BlobClient -from ._container_client import ContainerClient -from ._blob_service_client import BlobServiceClient -from ._lease import BlobLeaseClient -from ._download import StorageStreamDownloader -from ._quick_query_helper import BlobQueryReader -from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas -from ._shared.policies import ExponentialRetry, LinearRetry -from ._shared.response_handlers import PartialBatchErrorException -from ._shared.models import ( - LocationMode, - ResourceTypes, - AccountSasPermissions, - StorageErrorCode, - UserDelegationKey, - Services -) -from ._generated.models import RehydratePriority -from ._models import ( - BlobType, - BlockState, - StandardBlobTier, - PremiumPageBlobTier, - BlobImmutabilityPolicyMode, - SequenceNumberAction, - PublicAccess, - BlobAnalyticsLogging, - Metrics, - RetentionPolicy, - StaticWebsite, - CorsRule, - ContainerProperties, - BlobProperties, - FilteredBlob, - LeaseProperties, - ContentSettings, - CopyProperties, - BlobBlock, - PageRange, - AccessPolicy, - ContainerSasPermissions, - BlobSasPermissions, - CustomerProvidedEncryptionKey, - ContainerEncryptionScope, - BlobQueryError, - DelimitedJsonDialect, - DelimitedTextDialect, - QuickQueryDialect, - ArrowDialect, - ArrowType, - ObjectReplicationPolicy, - ObjectReplicationRule, - ImmutabilityPolicy, -) -from ._list_blobs_helper import BlobPrefix - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential __version__ = VERSION - -def upload_blob_to_url( - blob_url: str, - data: Union[Iterable[AnyStr], IO[AnyStr]], - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any -) -> Dict[str, Any]: - """Upload data to a given URL - - The data will be uploaded as a block blob. - - :param str blob_url: - The full URI to the blob. This can also include a SAS token. - :param data: - The data to upload. This can be bytes, text, an iterable or a file-like object. - :type data: bytes or str or Iterable - :param credential: - The credentials with which to authenticate. This is optional if the - blob URL already has a SAS token. The value can be a SAS token string, - an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredential class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :keyword bool overwrite: - Whether the blob to be uploaded should overwrite the current data. - If True, upload_blob_to_url will overwrite any existing data.
If set to False, the - operation will fail with a ResourceExistsError. - :keyword int max_concurrency: - The number of parallel connections with which to upload. - :keyword int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword dict(str,str) metadata: - Name-value pairs associated with the blob as metadata. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default) will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used, because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword str encoding: - Encoding to use if text is supplied as input. Defaults to UTF-8. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict(str, Any) - """ - with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return cast(BlobClient, client).upload_blob(data=data, blob_type=BlobType.BLOCKBLOB, **kwargs) - - -def _download_to_stream(client: BlobClient, handle: IO[bytes], **kwargs: Any) -> None: - """ - Download data to specified open file-handle. - - :param BlobClient client: The BlobClient to download with. - :param Stream handle: A Stream to download the data into. - """ - stream = client.download_blob(**kwargs) - stream.readinto(handle) - - -def download_blob_from_url( - blob_url: str, - output: Union[str, IO[bytes]], - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any -) -> None: - """Download the contents of a blob to a local file or stream. - - :param str blob_url: - The full URI to the blob. This can also include a SAS token. - :param output: - Where the data should be downloaded to. This could be either a file path to write to, - or an open IO handle to write to. - :type output: str or writable stream. - :param credential: - The credentials with which to authenticate. This is optional if the - blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredential class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :keyword bool overwrite: - Whether the local file should be overwritten if it already exists. The default value is - `False` - in which case a ValueError will be raised if the file already exists. If set to - `True`, an attempt will be made to write to the existing file.
If a stream handle is passed - in, this value is ignored. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword int offset: - Start of byte range to use for downloading a section of the blob. - Must be set if length is provided. - :keyword int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https as https (the default) will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used, because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :rtype: None - """ - overwrite = kwargs.pop('overwrite', False) - with BlobClient.from_blob_url(blob_url, credential=credential) as client: - if hasattr(output, 'write'): - _download_to_stream(client, cast(IO[bytes], output), **kwargs) - else: - if not overwrite and os.path.isfile(output): - raise ValueError(f"The file '{output}' already exists.") - with open(output, 'wb') as file_handle: - _download_to_stream(client, file_handle, **kwargs) - +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'upload_blob_to_url', - 'download_blob_from_url', - 'BlobServiceClient', - 'ContainerClient', - 'BlobClient', - 'BlobType', - 'BlobLeaseClient', - 'StorageErrorCode', - 'UserDelegationKey', - 'ExponentialRetry', - 'LinearRetry', - 'LocationMode', - 'BlockState', - 'StandardBlobTier', - 'PremiumPageBlobTier', - 'SequenceNumberAction', - 'BlobImmutabilityPolicyMode', - 'ImmutabilityPolicy', - 'PublicAccess', - 'BlobAnalyticsLogging', - 'Metrics', - 'RetentionPolicy', - 'StaticWebsite', - 'CorsRule', - 'ContainerProperties', - 'BlobProperties', - 'BlobPrefix', - 'FilteredBlob', - 'LeaseProperties', - 'ContentSettings', - 'CopyProperties', - 'BlobBlock', - 'PageRange', - 'AccessPolicy', - 'QuickQueryDialect', - 'ContainerSasPermissions', - 'BlobSasPermissions', - 'ResourceTypes', - 'AccountSasPermissions', - 'StorageStreamDownloader', - 'CustomerProvidedEncryptionKey', - 'RehydratePriority', - 'generate_account_sas', - 'generate_container_sas', - 'generate_blob_sas', - 'PartialBatchErrorException', - 'ContainerEncryptionScope', - 'BlobQueryError', - 'DelimitedJsonDialect', - 'DelimitedTextDialect', - 'ArrowDialect', - 'ArrowType', - 'BlobQueryReader', - 'ObjectReplicationPolicy', - 'ObjectReplicationRule', - 'Services', + "BlobClient", ] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py deleted file mode 100644 index fd63d959e1a8..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ /dev/null @@ -1,3290 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only - -import warnings -from datetime import datetime -from functools import partial -from typing import ( - Any, AnyStr, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, - TYPE_CHECKING -) -from typing_extensions import Self - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError -from azure.core.paging import ItemPaged -from azure.core.pipeline import Pipeline -from azure.core.tracing.decorator import distributed_trace -from ._blob_client_helpers import ( - _abort_copy_options, - _append_block_from_url_options, - _append_block_options, - _clear_page_options, - _commit_block_list_options, - _create_append_blob_options, - _create_page_blob_options, - _create_snapshot_options, - _delete_blob_options, - _download_blob_options, - _format_url, - _from_blob_url, - _get_blob_tags_options, - _get_block_list_result, - _get_page_ranges_options, - _parse_url, - _quick_query_options, - _resize_blob_options, - _seal_append_blob_options, - _set_blob_metadata_options, - _set_blob_tags_options, - _set_http_headers_options, - _set_sequence_number_options, - _stage_block_from_url_options, - _stage_block_options, - _start_copy_from_url_options, - _upload_blob_from_url_options, - _upload_blob_options, - _upload_page_options, - _upload_pages_from_url_options -) -from ._deserialize import ( - deserialize_blob_properties, - deserialize_pipeline_response_into_cls, - get_page_ranges_result, - parse_tags -) -from ._download import StorageStreamDownloader -from ._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION -from ._generated import AzureBlobStorage -from ._generated.models import CpkInfo -from ._lease import BlobLeaseClient -from ._models import BlobBlock, BlobProperties, BlobQueryError, BlobType, PageRange, PageRangePaged -from ._quick_query_helper import BlobQueryReader -from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper -from ._shared.response_handlers import process_storage_error, return_response_headers -from ._serialize import ( - get_access_conditions, - get_api_version, - get_modify_conditions, - get_version_id -) -from ._upload_helpers import ( - upload_append_blob, - upload_block_blob, - upload_page_blob -) - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential - from azure.storage.blob import ContainerClient - from ._models import ( - ContentSettings, - ImmutabilityPolicy, - PremiumPageBlobTier, - SequenceNumberAction, - StandardBlobTier - ) - - -class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods - """A client to interact with a specific blob, although that blob may not yet exist. - - For more optional configuration, please click - `here `__. - - :param str account_url: - The URI to the storage account. In order to create a client given the full URI to the blob, - use the :func:`from_blob_url` classmethod. - :param container_name: The container name for the blob. - :type container_name: str - :param blob_name: The name of the blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob_name: str - :param str snapshot: - The optional blob snapshot on which to operate. 
This can be the snapshot ID string - or the response returned from :func:`create_snapshot`. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. The value can be a SAS token string, - an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredential class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :keyword str api_version: - The Storage API version to use for requests. Default value is the most recent service version that is - compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. - - .. versionadded:: 12.2.0 - - :keyword str secondary_hostname: - The hostname of the secondary endpoint. - :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. - Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob will be - uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, - the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. - :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient - algorithm when uploading a block blob. Defaults to 4*1024*1024+1. - :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. - :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, - the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. - :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, - or 4MB. - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net. - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_authentication.py - :start-after: [START create_blob_client] - :end-before: [END create_blob_client] - :language: python - :dedent: 8 - :caption: Creating the BlobClient from a URL to a public blob (no auth needed). - - .. literalinclude:: ../samples/blob_samples_authentication.py - :start-after: [START create_blob_client_sas_url] - :end-before: [END create_blob_client_sas_url] - :language: python - :dedent: 8 - :caption: Creating the BlobClient from a SAS URL to a blob.
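To make the parameters documented above concrete, here is a minimal construction sketch (the account, container, and blob names are placeholders, and DefaultAzureCredential assumes the separate azure-identity package is installed):

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

blob = BlobClient(
    account_url="https://myaccount.blob.core.windows.net",
    container_name="mycontainer",
    blob_name="myblob.txt",
    credential=DefaultAzureCredential(),  # or a SAS token / account key string
)
print(blob.url)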
- """ - def __init__( - self, account_url: str, - container_name: str, - blob_name: str, - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> None: - parsed_url, sas_token, path_snapshot = _parse_url( - account_url=account_url, - container_name=container_name, - blob_name=blob_name) - self.container_name = container_name - self.blob_name = blob_name - - if snapshot is not None and hasattr(snapshot, 'snapshot'): - self.snapshot = snapshot.snapshot - elif isinstance(snapshot, dict): - self.snapshot = snapshot['snapshot'] - else: - self.snapshot = snapshot or path_snapshot - self.version_id = kwargs.pop('version_id', None) - - # This parameter is used for the hierarchy traversal. Give precedence to credential. - self._raw_credential = credential if credential else sas_token - self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) - super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] # pylint: disable=protected-access - self._configure_encryption(kwargs) - - def _format_url(self, hostname: str) -> str: - return _format_url( - container_name=self.container_name, - scheme=self.scheme, - blob_name=self.blob_name, - query_str=self._query_str, - hostname=hostname - ) - - @classmethod - def from_blob_url( - cls, blob_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - **kwargs: Any - ) -> Self: - """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. - - :param str blob_url: - The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be - either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. - :type blob_url: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`create_snapshot`. If specified, this will override - the snapshot in the url. 
- :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net. - :returns: A Blob client. - :rtype: ~azure.storage.blob.BlobClient - """ - account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) - return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=path_snapshot, credential=credential, **kwargs - ) - - @classmethod - def from_connection_string( - cls, conn_str: str, - container_name: str, - blob_name: str, - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> Self: - """Create BlobClient from a Connection String. - - :param str conn_str: - A connection string to an Azure Storage account. - :param container_name: The container name for the blob. - :type container_name: str - :param blob_name: The name of the blob with which to interact. - :type blob_name: str - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`create_snapshot`. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, - an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredential class from azure.identity. - Credentials provided here will take precedence over those in the connection string. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net. - :returns: A Blob client. - :rtype: ~azure.storage.blob.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_authentication.py - :start-after: [START auth_from_connection_string_blob] - :end-before: [END auth_from_connection_string_blob] - :language: python - :dedent: 8 - :caption: Creating the BlobClient from a connection string.
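A minimal sketch of the connection-string path documented above (the connection string is a placeholder shape, not a working secret):

from azure.storage.blob import BlobClient

conn_str = (
    "DefaultEndpointsProtocol=https;AccountName=myaccount;"
    "AccountKey=<key>;EndpointSuffix=core.windows.net"
)
blob = BlobClient.from_connection_string(
    conn_str, container_name="mycontainer", blob_name="myblob.txt"
)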
- """ - account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=snapshot, credential=credential, **kwargs - ) - - @distributed_trace - def get_account_information(self, **kwargs: Any) -> Dict[str, str]: - """Gets information related to the storage account in which the blob resides. - - The information can also be retrieved if the user has a SAS to a container or blob. - The keys in the returned dictionary include 'sku_name' and 'account_kind'. - - :returns: A dict of account information (SKU and account type). - :rtype: dict(str, str) - """ - try: - return cast(Dict[str, str], self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def upload_blob_from_url(self, source_url: str, **kwargs: Any) -> Dict[str, Any]: - """ - Creates a new Block Blob where the content of the blob is read from a given URL. - The content of an existing blob is overwritten with the new blob. - - :param str source_url: - A URL of up to 2 KB in length that specifies a file or blob. - The value should be URL-encoded as it would appear in a request URI. - The source must either be public or must be authenticated via a shared - access signature as part of the url or using the source_authorization keyword. - If the source is public, no authentication is required. - Examples: - https://myaccount.blob.core.windows.net/mycontainer/myblob - - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - - https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken - :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. - If True, upload_blob will overwrite the existing data. If set to False, the - operation will fail with ResourceExistsError. - :keyword bool include_source_blob_properties: - Indicates if properties from the source blob should be copied. Defaults to True. - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :paramtype tags: dict(str, str) - :keyword bytearray source_content_md5: - Specify the md5 that is used to verify the integrity of the source bytes. - :keyword ~datetime.datetime source_if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the source resource has been modified since the specified time. - :keyword ~datetime.datetime source_if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the source resource has not been modified since the specified date/time. 
- :keyword str source_etag: - The source ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions source_match_condition: - The source match condition to use upon the etag. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The destination match condition to use upon the etag. - :keyword destination_lease: - The lease ID specified for this header must match the lease ID of the - destination blob. If the request does not include the lease ID or it is not - valid, the operation fails with status code 412 (Precondition Failed). - :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. - :keyword ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: - A standard blob tier value to set the blob to. For this version of the library, - this is only applicable to block blobs on standard storage accounts. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string.
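Pulling the keywords above together, a hedged sketch of a typical upload_blob_from_url call (the source URL and SAS token are placeholders; `blob` is a BlobClient as constructed earlier):

# Server-side copy of a publicly readable or SAS-authorized source into this block blob.
source_url = "https://otheraccount.blob.core.windows.net/src/data.bin?<sas-token>"
result = blob.upload_blob_from_url(source_url, overwrite=True)
print(result["etag"], result["last_modified"])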
- :returns: Blob-updated property Dict (Etag and last modified) - :rtype: Dict[str, Any] - """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _upload_blob_from_url_options( - source_url=source_url, - **kwargs) - try: - return cast(Dict[str, Any], self._client.block_blob.put_blob_from_url(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def upload_blob( - self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], - blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, - length: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any - ) -> Dict[str, Any]: - """Creates a new blob from a data source with automatic chunking. - - :param data: The blob data to upload. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] - :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be - either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. - :param int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :param metadata: - Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - - .. versionadded:: 12.4.0 - - :paramtype tags: dict(str, str) - :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. - If True, upload_blob will overwrite the existing data. If set to False, the - operation will fail with ResourceExistsError. The exception to the above is with Append - blob types: if set to False and the data already exists, an error will not be raised - and the data will be appended to the existing blob. If set overwrite=True, then the existing - append blob will be deleted, and a new one created. Defaults to False. - :keyword ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the blob has an active lease. If specified, upload_blob only succeeds if the - blob's lease is active and matches this ID. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. 
Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: - A page blob tier value to set the blob to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: - A standard blob tier value to set the blob to. For this version of the library, - this is only applicable to block blobs on standard storage accounts. - :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - Currently this parameter of upload_blob() API is for BlockBlob only. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool legal_hold: - Specified if a legal hold should be set on the blob. - Currently this parameter of upload_blob() API is for BlockBlob only. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword int maxsize_condition: - Optional conditional header. The max length in bytes permitted for - the append blob. If the Append Block operation would cause the blob - to exceed that limit or if the blob size is already greater than the - value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword str encoding: - Defaults to UTF-8. 
- :keyword progress_hook: - A callback to track the progress of a long running upload. The signature is - function(current: int, total: Optional[int]) where current is the number of bytes transferred - so far, and total is the size of the blob or None if the size is unknown. - :paramtype progress_hook: Callable[[int, Optional[int]], None] - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. This method may make multiple calls to the service and - the timeout will apply to each call individually. - :returns: Blob-updated property Dict (Etag and last modified) - :rtype: Dict[str, Any] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_hello_world.py - :start-after: [START upload_a_blob] - :end-before: [END upload_a_blob] - :language: python - :dedent: 12 - :caption: Upload a blob to the container. - """ - if self.require_encryption and not self.key_encryption_key: - raise ValueError("Encryption required but no key was provided.") - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _upload_blob_options( - data=data, - blob_type=blob_type, - length=length, - metadata=metadata, - encryption_options={ - 'required': self.require_encryption, - 'version': self.encryption_version, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function - }, - config=self._config, - sdk_moniker=self._sdk_moniker, - client=self._client, - **kwargs) - if blob_type == BlobType.BlockBlob: - return upload_block_blob(**options) - if blob_type == BlobType.PageBlob: - return upload_page_blob(**options) - return upload_append_blob(**options)
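A short usage sketch for the upload path implemented above (the file name, content type, and metadata are illustrative; `blob` is an existing BlobClient):

from azure.storage.blob import ContentSettings

# Upload a local file as a block blob, replacing any existing data.
with open("report.csv", "rb") as data:
    blob.upload_blob(
        data,
        overwrite=True,
        content_settings=ContentSettings(content_type="text/csv"),
        metadata={"source": "nightly-job"},
    )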
- - @overload - def download_blob( - self, offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: str, - **kwargs: Any - ) -> StorageStreamDownloader[str]: - ... - - @overload - def download_blob( - self, offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: None = None, - **kwargs: Any - ) -> StorageStreamDownloader[bytes]: - ... - - @distributed_trace - def download_blob( - self, offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: Union[str, None] = None, - **kwargs: Any - ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: - """Downloads a blob to the StorageStreamDownloader. The readall() method must - be used to read all the content or readinto() must be used to download the blob into - a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. - - :param int offset: - Start of byte range to use for downloading a section of the blob. - Must be set if length is provided. - :param int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to download. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the blob has an active lease. If specified, download_blob only - succeeds if the blob's lease is active and matches this ID. Value can be a - BlobLeaseClient object or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword Optional[str] encoding: - Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. - :keyword progress_hook: - A callback to track the progress of a long running download. The signature is - function(current: int, total: int) where current is the number of bytes transferred - so far, and total is the total size of the download. - :paramtype progress_hook: Callable[[int, int], None] - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. This method may make multiple calls to the service and - the timeout will apply to each call individually. - :returns: A streaming object (StorageStreamDownloader) - :rtype: ~azure.storage.blob.StorageStreamDownloader - - ..
admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_hello_world.py - :start-after: [START download_a_blob] - :end-before: [END download_a_blob] - :language: python - :dedent: 12 - :caption: Download a blob. - """ - if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function): - raise ValueError("Encryption required but no key was provided.") - if length is not None and offset is None: - raise ValueError("Offset value must not be None if length is set.") - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _download_blob_options( - blob_name=self.blob_name, - container_name=self.container_name, - version_id=get_version_id(self.version_id, kwargs), - offset=offset, - length=length, - encoding=encoding, - encryption_options={ - 'required': self.require_encryption, - 'version': self.encryption_version, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function - }, - config=self._config, - sdk_moniker=self._sdk_moniker, - client=self._client, - **kwargs) - return StorageStreamDownloader(**options)
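A short sketch of the two common download paths described above (`blob` is an existing BlobClient; the local file name is illustrative):

stream = blob.download_blob(max_concurrency=2)  # returns a StorageStreamDownloader
with open("report.csv", "wb") as handle:
    stream.readinto(handle)  # stream to disk without buffering the whole blob

text = blob.download_blob(encoding="utf-8").readall()  # decoded in-memory variant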
- - @distributed_trace - def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: - """Enables users to select/project on blob or blob snapshot data by providing simple query expressions. - This operation returns a BlobQueryReader; users need to use readall() or readinto() to get query data. - - :param str query_expression: - Required. A query statement. For more details see - https://learn.microsoft.com/azure/storage/blobs/query-acceleration-sql-reference. - :keyword Callable[~azure.storage.blob.BlobQueryError] on_error: - A function to be called on any processing errors returned by the service. - :keyword blob_format: - Optional. Defines the serialization of the data currently stored in the blob. The default is to - treat the blob data as CSV data formatted in the default dialect. This can be overridden with - a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum). - These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string - - .. note:: - "ParquetDialect" is in preview, so some features may not work as intended. - - :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect - or ~azure.storage.blob.QuickQueryDialect or str - :keyword output_format: - Optional. Defines the output serialization for the data stream. By default the data will be returned - as it is represented in the blob (Parquet formats default to DelimitedTextDialect). - By providing an output format, the blob data will be reformatted according to that profile. - This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect. - These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string - :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect - or List[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. - :returns: A streaming object (BlobQueryReader) - :rtype: ~azure.storage.blob.BlobQueryReader - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_query.py - :start-after: [START query] - :end-before: [END query] - :language: python - :dedent: 4 - :caption: select/project on blob or blob snapshot data by providing simple query expressions. - """ - errors = kwargs.pop("on_error", None) - error_cls = kwargs.pop("error_cls", BlobQueryError) - encoding = kwargs.pop("encoding", None) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options, delimiter = _quick_query_options(self.snapshot, query_expression, **kwargs) - try: - headers, raw_response_body = self._client.blob.query(**options) - except HttpResponseError as error: - process_storage_error(error) - return BlobQueryReader( - name=self.blob_name, - container=self.container_name, - errors=errors, - record_delimiter=delimiter, - encoding=encoding, - headers=headers, - response=raw_response_body, - error_cls=error_cls)
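A hedged sketch of a query_blob call using the dialects documented above (the CSV layout and the query itself are illustrative):

from azure.storage.blob import DelimitedTextDialect

input_format = DelimitedTextDialect(delimiter=",", quotechar='"', has_header=True)
reader = blob.query_blob("SELECT * FROM BlobStorage", blob_format=input_format)
print(reader.readall())  # bytes of the filtered/projected result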
- - @distributed_trace - def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: - """Marks the specified blob for deletion. - - The blob is later deleted during garbage collection. - Note that in order to delete a blob, you must delete all of its - snapshots. You can delete both at the same time with the delete_blob() - operation. - - If a delete retention policy is enabled for the service, then this operation soft deletes the blob - and retains the blob for a specified number of days. - After the specified number of days, the blob's data is removed from the service during garbage collection. - A soft-deleted blob is accessible through :func:`~ContainerClient.list_blobs()` by specifying the `include=['deleted']` - option. A soft-deleted blob can be restored using the :func:`undelete` operation. - - :param Optional[str] delete_snapshots: - Required if the blob has associated snapshots. Values include: - - "only": Deletes only the blob's snapshots. - - "include": Deletes the blob along with all snapshots. - :keyword Optional[str] version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to delete. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword lease: - Required if the blob has an active lease. If specified, delete_blob only - succeeds if the blob's lease is active and matches this ID. Value can be a - BlobLeaseClient object or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_hello_world.py - :start-after: [START delete_blob] - :end-before: [END delete_blob] - :language: python - :dedent: 12 - :caption: Delete a blob. - """ - options = _delete_blob_options( - snapshot=self.snapshot, - version_id=get_version_id(self.version_id, kwargs), - delete_snapshots=delete_snapshots, - **kwargs) - try: - self._client.blob.delete(**options) - except HttpResponseError as error: - process_storage_error(error)
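A minimal sketch pairing delete_blob, implemented above, with the undelete_blob method documented next (undelete only succeeds while the service's delete retention window is active):

blob.delete_blob(delete_snapshots="include")  # soft-delete the blob and its snapshots
blob.undelete_blob()  # restore it within the retention period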
- - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START undelete_blob] - :end-before: [END undelete_blob] - :language: python - :dedent: 8 - :caption: Undeleting a blob. - """ - try: - self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def exists(self, **kwargs: Any) -> bool: - """ - Returns True if a blob exists with the defined parameters, and returns - False otherwise. - - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to check if it exists. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: boolean - :rtype: bool - """ - version_id = get_version_id(self.version_id, kwargs) - try: - self._client.blob.get_properties( - snapshot=self.snapshot, - version_id=version_id, - **kwargs) - return True - # Encrypted with CPK - except ResourceExistsError: - return True - except HttpResponseError as error: - try: - process_storage_error(error) - except ResourceNotFoundError: - return False - - @distributed_trace - def get_blob_properties(self, **kwargs: Any) -> BlobProperties: - """Returns all user-defined metadata, standard HTTP properties, and - system properties for the blob. It does not return the content of the blob. - - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to get properties. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. 
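# A sketch of the soft-delete round trip using exists() and undelete_blob(), assuming
# the same `blob_client` and a delete retention policy enabled for the account:
if not blob_client.exists():
    blob_client.undelete_blob()  # only succeeds inside the retention window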
- :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: BlobProperties - :rtype: ~azure.storage.blob.BlobProperties - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] - :language: python - :dedent: 8 - :caption: Getting the properties for a blob. - """ - # TODO: extract this out as _get_blob_properties_options - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - try: - cls_method = kwargs.pop('cls', None) - if cls_method: - kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) - blob_props = cast(BlobProperties, self._client.blob.get_properties( - timeout=kwargs.pop('timeout', None), - version_id=version_id, - snapshot=self.snapshot, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=kwargs.pop('cls', None) or deserialize_blob_properties, - cpk_info=cpk_info, - **kwargs)) - except HttpResponseError as error: - process_storage_error(error) - blob_props.name = self.blob_name - if isinstance(blob_props, BlobProperties): - blob_props.container = self.container_name - blob_props.snapshot = self.snapshot - return blob_props - - @distributed_trace - def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: - """Sets system properties on the blob. - - If one property is set for the content_settings, all properties will be overridden. - - :param ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. 
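# A sketch for get_blob_properties, assuming `blob_client`; name, container and
# snapshot are filled in client-side, as the code above shows:
props = blob_client.get_blob_properties()
print(props.name, props.container, props.snapshot)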
Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: Dict[str, Any] - """ - options = _set_http_headers_options(content_settings=content_settings, **kwargs) - try: - return cast(Dict[str, Any], self._client.blob.set_http_headers(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def set_blob_metadata( - self, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Sets user-defined metadata for the blob as one or more name-value pairs. - - :param metadata: - Dict containing name and value pairs. Each call to this operation - replaces all existing metadata attached to the blob. To remove all - metadata from the blob, call this operation with no metadata headers. - :type metadata: dict(str, str) - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. 
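# A sketch for set_http_headers, assuming `blob_client`; note the warning above that
# setting any one property via ContentSettings overrides all of the others:
from azure.storage.blob import ContentSettings

blob_client.set_http_headers(content_settings=ContentSettings(content_type="application/json"))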
- Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: Dict[str, Union[str, datetime]] - """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _set_blob_metadata_options(metadata=metadata, **kwargs) - try: - return cast(Dict[str, Union[str, datetime]], self._client.blob.set_metadata(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def set_immutability_policy( - self, immutability_policy: "ImmutabilityPolicy", - **kwargs: Any - ) -> Dict[str, str]: - """The Set Immutability Policy operation sets the immutability policy on the blob. - - .. versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. - :rtype: Dict[str, str] - """ - - kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time - kwargs['immutability_policy_mode'] = immutability_policy.policy_mode - return cast(Dict[str, str], self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs)) - - @distributed_trace - def delete_immutability_policy(self, **kwargs: Any) -> None: - """The Delete Immutability Policy operation deletes the immutability policy on the blob. - - .. versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. - :rtype: Dict[str, str] - """ - - self._client.blob.delete_immutability_policy(**kwargs) - - @distributed_trace - def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: - """The Set Legal Hold operation sets a legal hold on the blob. - - .. 
versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :param bool legal_hold: - Specified if a legal hold should be set on the blob. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. - :rtype: Dict[str, Union[str, datetime, bool]] - """ - - return cast(Dict[str, Union[str, datetime, bool]], - self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs)) - - @distributed_trace - def create_page_blob( - self, size: int, - content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Creates a new Page Blob of the specified size. - - :param int size: - This specifies the maximum size for the page blob, up to 1 TB. - The page blob size must be aligned to a 512-byte boundary. - :param ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :param metadata: - Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) - :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: - A page blob tier value to set the blob to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - - .. versionadded:: 12.4.0 - - :paramtype tags: dict(str, str) - :keyword int sequence_number: - Only for Page blobs. The sequence number is a user-controlled value that you can use to - track requests. The value of the sequence number must be between 0 - and 2^63 - 1.The default value is 0. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool legal_hold: - Specified if a legal hold should be set on the blob. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. 
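# A sketch for set_blob_metadata, assuming `blob_client`; each call replaces the
# blob's entire metadata set, per the docstring above:
blob_client.set_blob_metadata({"project": "alpha", "owner": "data-team"})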
- :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict[str, Any] - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _create_page_blob_options( - size=size, - content_settings=content_settings, - metadata=metadata, - premium_page_blob_tier=premium_page_blob_tier, - **kwargs) - try: - return cast(Dict[str, Any], self._client.page_blob.create(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def create_append_blob( - self, content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Creates a new Append Blob. This operation creates a new 0-length append blob. The content - of any existing blob is overwritten with the newly initialized append blob. To add content to - the append blob, call the :func:`append_block` or :func:`append_block_from_url` method. - - :param ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :param metadata: - Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. 
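# A sketch for create_page_blob, assuming `blob_client`; the size must be aligned to
# a 512-byte boundary, per the docstring above:
blob_client.create_page_blob(size=512 * 1024)  # 512 KiB; sequence number defaults to 0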
- Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - - .. versionadded:: 12.4.0 - - :paramtype tags: dict(str, str) - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool legal_hold: - Specified if a legal hold should be set on the blob. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict[str, Any] - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _create_append_blob_options( - content_settings=content_settings, - metadata=metadata, - **kwargs) - try: - return cast(Dict[str, Union[str, datetime]], self._client.append_blob.create(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def create_snapshot( - self, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Creates a snapshot of the blob. - - A snapshot is a read-only version of a blob that's taken at a point in time. - It can be read, copied, or deleted, but not modified. Snapshots provide a way - to back up a blob as it appears at a moment in time. - - A snapshot of a blob has the same name as the base blob from which the snapshot - is taken, with a DateTime value appended to indicate the time at which the - snapshot was taken. - - :param metadata: - Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. - - .. versionadded:: 12.4.0 - - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. 
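# A sketch for create_append_blob, assuming `blob_client`; the new blob is 0-length,
# and content is added afterwards via the append_block methods referenced above:
blob_client.create_append_blob(metadata={"source": "ingest-pipeline"})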
For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). - :rtype: dict[str, Any] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START create_blob_snapshot] - :end-before: [END create_blob_snapshot] - :language: python - :dedent: 8 - :caption: Create a snapshot of the blob. - """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _create_snapshot_options(metadata=metadata, **kwargs) - try: - return cast(Dict[str, Any], self._client.blob.create_snapshot(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def start_copy_from_url( - self, source_url: str, - metadata: Optional[Dict[str, str]] = None, - incremental_copy: bool = False, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Copies a blob from the given URL. - - This operation returns a dictionary containing `copy_status` and `copy_id`, - which can be used to check the status of or abort the copy operation. - `copy_status` will be 'success' if the copy completed synchronously or - 'pending' if the copy has been started asynchronously. For asynchronous copies, - the status can be checked by polling the :func:`get_blob_properties` method and - checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. - The Blob service copies blobs on a best-effort basis. - - The source blob for a copy operation may be a block blob, an append blob, - or a page blob. If the destination blob already exists, it must be of the - same blob type as the source blob. Any existing destination blob will be - overwritten. The destination blob cannot be modified while a copy operation - is in progress. - - When copying from a page blob, the Blob service creates a destination page - blob of the source blob's length, initially containing all zeroes. Then - the source page ranges are enumerated, and non-empty ranges are copied. - - For a block blob or an append blob, the Blob service creates a committed - blob of zero length before returning from this operation. When copying - from a block blob, all committed blocks and their block IDs are copied. - Uncommitted blocks are not copied. At the end of the copy operation, the - destination blob will have the same committed block count as the source. - - When copying from an append blob, all committed blocks are copied. At the - end of the copy operation, the destination blob will have the same committed - block count as the source. - - :param str source_url: - A URL of up to 2 KB in length that specifies a file or blob. - The value should be URL-encoded as it would appear in a request URI. - If the source is in another account, the source must either be public - or must be authenticated via a shared access signature. If the source - is public, no authentication is required. - Examples: - https://myaccount.blob.core.windows.net/mycontainer/myblob - - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - - https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken - :param metadata: - Name-value pairs associated with the blob as metadata. 
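# A sketch for create_snapshot, assuming `blob_client`; the returned dict carries the
# snapshot ID, etag and last-modified values described in the docstring above:
snapshot_info = blob_client.create_snapshot(metadata={"state": "pre-migration"})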
If no name-value - pairs are specified, the operation will copy the metadata from the - source blob or file to the destination blob. If one or more name-value - pairs are specified, the destination blob is created with the specified - metadata, and metadata is not copied from the source blob or file. - :type metadata: dict(str, str) - :param bool incremental_copy: - Copies the snapshot of the source page blob to a destination page blob. - The snapshot is copied such that only the differential changes between - the previously copied snapshot are transferred to the destination. - The copied snapshots are complete copies of the original snapshot and - can be read or copied from as usual. Defaults to False. - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_). - - The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob. - This option is only available when `incremental_copy=False` and `requires_sync=True`. - - .. versionadded:: 12.4.0 - - :paramtype tags: dict(str, str) or Literal["COPY"] - :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool legal_hold: - Specified if a legal hold should be set on the blob. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword ~datetime.datetime source_if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this conditional header to copy the blob only if the source - blob has been modified since the specified date/time. - :keyword ~datetime.datetime source_if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this conditional header to copy the blob only if the source blob - has not been modified since the specified date/time. - :keyword str source_etag: - The source ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions source_match_condition: - The source match condition to use upon the etag. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this conditional header to copy the blob only - if the destination blob has been modified since the specified date/time. - If the destination blob has not been modified, the Blob service returns - status code 412 (Precondition Failed). - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. 
Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this conditional header to copy the blob only - if the destination blob has not been modified since the specified - date/time. If the destination blob has been modified, the Blob service - returns status code 412 (Precondition Failed). - :keyword str etag: - The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The destination match condition to use upon the etag. - :keyword destination_lease: - The lease ID specified for this header must match the lease ID of the - destination blob. If the request does not include the lease ID or it is not - valid, the operation fails with status code 412 (Precondition Failed). - :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword source_lease: - Specify this to perform the Copy Blob operation only if - the lease ID given matches the active lease ID of the source blob. - :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: - A page blob tier value to set the blob to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: - A standard blob tier value to set the blob to. For this version of the library, - this is only applicable to block blobs on standard storage accounts. - :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :keyword bool seal_destination_blob: - Seal the destination append blob. This operation is only for append blob. - - .. versionadded:: 12.4.0 - - :keyword bool requires_sync: - Enforces that the service will not return a response until the copy is complete. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. This option is only available when `incremental_copy` is - set to False and `requires_sync` is set to True. - - .. versionadded:: 12.9.0 - - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.10.0 - - :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). - :rtype: dict[str, Union[str, ~datetime.datetime]] - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/blob_samples_common.py - :start-after: [START copy_blob_from_url] - :end-before: [END copy_blob_from_url] - :language: python - :dedent: 12 - :caption: Copy a blob from a URL. - """ - options = _start_copy_from_url_options( - source_url=source_url, - metadata=metadata, - incremental_copy=incremental_copy, - **kwargs) - try: - if incremental_copy: - return cast(Dict[str, Union[str, datetime]], self._client.page_blob.copy_incremental(**options)) - return cast(Dict[str, Union[str, datetime]], self._client.blob.start_copy_from_url(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def abort_copy( - self, copy_id: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> None: - """Abort an ongoing copy operation. - - This will leave a destination blob with zero length and full metadata. - This will raise an error if the copy operation has already ended. - - :param copy_id: - The copy operation to abort. This can be either an ID string, or an - instance of BlobProperties. - :type copy_id: str or ~azure.storage.blob.BlobProperties - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START abort_copy_blob_from_url] - :end-before: [END abort_copy_blob_from_url] - :language: python - :dedent: 12 - :caption: Abort copying a blob from URL. - """ - options = _abort_copy_options(copy_id, **kwargs) - try: - self._client.blob.abort_copy_from_url(**options) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, **kwargs: Any) -> BlobLeaseClient: - """Requests a new lease. - - If the blob does not have an active lease, the Blob - Service creates a lease on the blob and returns a new lease. - - :param int lease_duration: - Specifies the duration of the lease, in seconds, or negative one - (-1) for a lease that never expires. A non-infinite lease can be - between 15 and 60 seconds. A lease duration cannot be changed - using renew or change. Default is -1 (infinite lease). - :param str lease_id: - Proposed lease ID, in a GUID string format. The Blob Service - returns 400 (Invalid request) if the proposed lease ID is not - in the correct format. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. 
``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: A BlobLeaseClient object. - :rtype: ~azure.storage.blob.BlobLeaseClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START acquire_lease_on_blob] - :end-before: [END acquire_lease_on_blob] - :language: python - :dedent: 8 - :caption: Acquiring a lease on a blob. - """ - lease = BlobLeaseClient(self, lease_id=lease_id) - lease.acquire(lease_duration=lease_duration, **kwargs) - return lease - - @distributed_trace - def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: - """This operation sets the tier on a block blob. - - A block blob's tier determines Hot/Cool/Archive storage type. - This operation does not update the blob's ETag. - - :param standard_blob_tier: - Indicates the tier to be set on the blob. Options include 'Hot', 'Cool', - 'Archive'. The hot tier is optimized for storing data that is accessed - frequently. The cool storage tier is optimized for storing data that - is infrequently accessed and stored for at least a month. The archive - tier is optimized for storing data that is rarely accessed and stored - for at least six months with flexible latency requirements. - :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier - :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to download. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. 
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :rtype: None - """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - if standard_blob_tier is None: - raise ValueError("A StandardBlobTier must be specified") - if self.snapshot and kwargs.get('version_id'): - raise ValueError("Snapshot and version_id cannot be set at the same time") - try: - self._client.blob.set_tier( - tier=standard_blob_tier, - snapshot=self.snapshot, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - lease_access_conditions=access_conditions, - version_id=version_id, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def stage_block( - self, block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], - length: Optional[int] = None, - **kwargs: Any - ) -> Dict[str, Any]: - """Creates a new block to be committed as part of a blob. - - :param str block_id: A string value that identifies the block. - The string should be less than or equal to 64 bytes in size. - For a given blob, the block_id must be the same size for each block. - :param data: The blob data. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] - :param int length: Size of the block. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword str encoding: - Defaults to UTF-8. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob property dict. 
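# A sketch for set_standard_blob_tier, assuming `blob_client`; valid tier names come
# from the docstring above:
blob_client.set_standard_blob_tier("Archive")  # 'Hot', 'Cool' or 'Archive'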
- :rtype: dict[str, Any] - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _stage_block_options( - block_id=block_id, - data=data, - length=length, - **kwargs) - try: - return cast(Dict[str, Any], self._client.block_blob.stage_block(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def stage_block_from_url( - self, block_id: str, - source_url: str, - source_offset: Optional[int] = None, - source_length: Optional[int] = None, - source_content_md5: Optional[Union[bytes, bytearray]] = None, - **kwargs: Any - ) -> Dict[str, Any]: - """Creates a new block to be committed as part of a blob where - the contents are read from a URL. - - :param str block_id: A string value that identifies the block. - The string should be less than or equal to 64 bytes in size. - For a given blob, the block_id must be the same size for each block. - :param str source_url: The URL. - :param int source_offset: - Start of byte range to use for the block. - Must be set if source length is provided. - :param int source_length: The size of the block in bytes. - :param bytearray source_content_md5: - Specify the md5 calculated for the range of - bytes that must be read from the copy source. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. - :returns: Blob property dict. 
- :rtype: dict[str, Any] - """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _stage_block_from_url_options( - block_id=block_id, - source_url=source_url, - source_offset=source_offset, - source_length=source_length, - source_content_md5=source_content_md5, - **kwargs) - try: - return cast(Dict[str, Any], self._client.block_blob.stage_block_from_url(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def get_block_list( - self, block_list_type: str = "committed", - **kwargs: Any - ) -> Tuple[List[BlobBlock], List[BlobBlock]]: - """The Get Block List operation retrieves the list of blocks that have - been uploaded as part of a block blob. - - :param str block_list_type: - Specifies whether to return the list of committed - blocks, the list of uncommitted blocks, or both lists together. - Possible values include: 'committed', 'uncommitted', 'all' - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. - :returns: A tuple of two lists - committed and uncommitted blocks - :rtype: Tuple[List[BlobBlock], List[BlobBlock]] - """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - try: - blocks = self._client.block_blob.get_block_list( - list_type=block_list_type, - snapshot=self.snapshot, - timeout=kwargs.pop('timeout', None), - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - return _get_block_list_result(blocks) - - @distributed_trace - def commit_block_list( - self, block_list: List[BlobBlock], - content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """The Commit Block List operation writes a blob by specifying the list of - block IDs that make up the blob. - - :param list block_list: - List of BlobBlock objects to commit. - :param ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :param metadata: - Name-value pairs associated with the blob as metadata. - :type metadata: dict[str, str] - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - - ..
versionadded:: 12.4.0 - - :paramtype tags: dict(str, str) - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool legal_hold: - Specified if a legal hold should be set on the blob. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword bool validate_content: - If true, calculates an MD5 hash of the page content. The storage - service checks the hash of the content that has arrived - with the hash that was sent. This is primarily valuable for detecting - bitflips on the wire if using http instead of https, as https (the default), - will already validate. Note that this MD5 hash is not stored with the - blob. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: - A standard blob tier value to set the blob to. For this version of the library, - this is only applicable to block blobs on standard storage accounts. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. 
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _commit_block_list_options(
- block_list=block_list,
- content_settings=content_settings,
- metadata=metadata,
- **kwargs)
- try:
- return cast(Dict[str, Any], self._client.block_blob.commit_block_list(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None:
- """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts.
-
- :param premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :rtype: None
- """
- access_conditions = get_access_conditions(kwargs.pop('lease', None))
- mod_conditions = get_modify_conditions(kwargs)
- if premium_page_blob_tier is None:
- raise ValueError("A PremiumPageBlobTier must be specified")
- try:
- self._client.blob.set_tier(
- tier=premium_page_blob_tier,
- timeout=kwargs.pop('timeout', None),
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
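
Reviewer note: a minimal sketch of the stage-then-commit flow the removed methods implemented together; identifiers and data are placeholders:

import uuid
from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "myblob")  # placeholders

# Stage a block, then commit the block list; only committed blocks become part of the blob.
block_id = str(uuid.uuid4())
blob.stage_block(block_id=block_id, data=b"hello, world")
blob.commit_block_list([BlobBlock(block_id=block_id)])
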
-
- @distributed_trace
- def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]:
- """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot.
- Each call to this operation replaces all existing tags attached to the blob. To remove all
- tags from the blob, call this operation with no tags set.
-
- .. versionadded:: 12.4.0
- This operation was introduced in API version '2019-12-12'.
-
- :param tags:
- Name-value pairs associated with the blob as tags. Tags are case-sensitive.
- The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
- and tag values must be between 0 and 256 characters.
- Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
- :type tags: dict(str, str)
- :keyword str version_id:
- The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to add tags to.
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the tags content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default),
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on destination blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified)
- :rtype: Dict[str, Any]
- """
- version_id = get_version_id(self.version_id, kwargs)
- options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs)
- try:
- return cast(Dict[str, Any], self._client.blob.set_tags(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]:
- """The Get Tags operation enables users to get tags on a blob or specific blob version, or snapshot.
-
- .. versionadded:: 12.4.0
- This operation was introduced in API version '2019-12-12'.
-
- :keyword Optional[str] version_id:
- The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to get tags for.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on destination blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Key value pairs of blob tags.
- :rtype: Dict[str, str]
- """
- version_id = get_version_id(self.version_id, kwargs)
- options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
- try:
- _, tags = self._client.blob.get_tags(**options)
- return cast(Dict[str, str], parse_tags(tags))
- except HttpResponseError as error:
- process_storage_error(error)
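
Reviewer note: a short sketch of the tag round trip these removed methods provided; tag names are placeholders:

blob.set_blob_tags({"project": "alpha", "stage": "raw"})
print(blob.get_blob_tags())  # {'project': 'alpha', 'stage': 'raw'}

# Per the docstring, calling with no tags set replaces the tag set with nothing,
# i.e. removes all tags from the blob.
blob.set_blob_tags()
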
-
- @distributed_trace
- def get_page_ranges(
- self, offset: Optional[int] = None,
- length: Optional[int] = None,
- previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
- **kwargs: Any
- ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
- """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot
- of a page blob.
-
- :param int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param str previous_snapshot_diff:
- The snapshot diff parameter that contains an opaque DateTime value that
- specifies a previous blob snapshot to be compared
- against a more recent snapshot or the current blob.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns:
- A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
- The first element is the filled page ranges, the second element is the cleared page ranges.
- :rtype: tuple(list(dict(str, int)), list(dict(str, int)))
- """
- warnings.warn(
- "get_page_ranges is deprecated, use list_page_ranges instead",
- DeprecationWarning
- )
-
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- previous_snapshot_diff=previous_snapshot_diff,
- **kwargs)
- try:
- if previous_snapshot_diff:
- ranges = self._client.page_blob.get_page_ranges_diff(**options)
- else:
- ranges = self._client.page_blob.get_page_ranges(**options)
- except HttpResponseError as error:
- process_storage_error(error)
- return get_page_ranges_result(ranges)
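
Reviewer note: the deprecation warning above points callers at list_page_ranges; a hedged sketch of the migration, assuming `page_blob` is an existing BlobClient for a page blob:

# Deprecated form: two eagerly built lists of {'start': ..., 'end': ...} dicts.
filled, cleared = page_blob.get_page_ranges()

# Replacement: a lazily paged iterator of PageRange objects.
for page_range in page_blob.list_page_ranges():
    print(page_range.start, page_range.end, page_range.cleared)
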
-
- @distributed_trace
- def list_page_ranges(
- self,
- *,
- offset: Optional[int] = None,
- length: Optional[int] = None,
- previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None,
- **kwargs: Any
- ) -> ItemPaged[PageRange]:
- """Returns the list of valid page ranges for a Page Blob or snapshot
- of a page blob. If `previous_snapshot` is specified, the result will be
- a diff of changes between the target blob and the previous snapshot.
-
- :keyword int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :keyword int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :keyword previous_snapshot:
- A snapshot value that specifies that the response will contain only pages that were changed
- between target blob and previous snapshot. Changed pages include both updated and cleared
- pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot`
- is the older of the two.
- :paramtype previous_snapshot: str or Dict[str, Any]
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int results_per_page:
- The maximum number of page ranges to retrieve per API call.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) of PageRange.
- :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange]
- """
- results_per_page = kwargs.pop('results_per_page', None)
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- previous_snapshot_diff=previous_snapshot,
- **kwargs)
-
- if previous_snapshot:
- command = partial(
- self._client.page_blob.get_page_ranges_diff,
- **options)
- else:
- command = partial(
- self._client.page_blob.get_page_ranges,
- **options)
- return ItemPaged(
- command, results_per_page=results_per_page,
- page_iterator_class=PageRangePaged)
-
- @distributed_trace
- def get_page_range_diff_for_managed_disk(
- self, previous_snapshot_url: str,
- offset: Optional[int] = None,
- length: Optional[int] = None,
- **kwargs: Any
- ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
- """Returns the list of valid page ranges for a managed disk or snapshot.
-
- .. note::
- This operation is only available for managed disk accounts.
-
- .. versionadded:: 12.2.0
- This operation was introduced in API version '2019-07-07'.
-
- :param str previous_snapshot_url:
- Specifies the URL of a previous snapshot of the managed disk.
- The response will only contain pages that were changed between the target blob and
- its previous snapshot.
- :param int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns:
- A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
- The first element is the filled page ranges, the second element is the cleared page ranges.
- :rtype: tuple(list(dict(str, int)), list(dict(str, int)))
- """
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- prev_snapshot_url=previous_snapshot_url,
- **kwargs)
- try:
- ranges = self._client.page_blob.get_page_ranges_diff(**options)
- except HttpResponseError as error:
- process_storage_error(error)
- return get_page_ranges_result(ranges)
-
- @distributed_trace
- def set_sequence_number(
- self, sequence_number_action: Union[str, "SequenceNumberAction"],
- sequence_number: Optional[str] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """Sets the blob sequence number.
-
- :param str sequence_number_action:
- This property indicates how the service should modify the blob's sequence
- number. See :class:`~azure.storage.blob.SequenceNumberAction` for more information.
- :param str sequence_number:
- This property sets the blob's sequence number. The sequence number is a
- user-controlled property that you can use to track requests and manage
- concurrency issues.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs)
- try:
- return cast(Dict[str, Any], self._client.page_blob.update_sequence_number(**options))
- except HttpResponseError as error:
- process_storage_error(error)
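
Reviewer note: a one-line sketch of the removed method; the action is passed as the plain string the docstring documents, and the value is a placeholder:

# 'update' sets the sequence number to the supplied value; 'max' and 'increment'
# are the other actions named by SequenceNumberAction.
page_blob.set_sequence_number("update", sequence_number="7")
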
-
- @distributed_trace
- def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
- """Resizes a page blob to the specified size.
-
- If the specified value is less than the current size of the blob,
- then all pages above the specified value are cleared.
-
- :param int size:
- Size used to resize blob. Maximum size for a page blob is up to 1 TB.
- The page blob size must be aligned to a 512-byte boundary.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _resize_blob_options(size=size, **kwargs)
- try:
- return cast(Dict[str, Any], self._client.page_blob.resize(**options))
- except HttpResponseError as error:
- process_storage_error(error)
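
Reviewer note: the size constraint above is easy to miss; a minimal sketch with a placeholder client:

# Page blob sizes must be 512-byte aligned; grow (or shrink) the blob to 1 MiB.
page_blob.resize_blob(1024 * 1024)
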
-
- @distributed_trace
- def upload_page(
- self, page: bytes,
- offset: int,
- length: int,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """The Upload Pages operation writes a range of pages to a page blob.
-
- :param bytes page:
- Content of the page.
- :param int offset:
- Start of byte range to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int length:
- Number of bytes to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the page content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default),
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword int if_sequence_number_lte:
- If the blob's sequence number is less than or equal to
- the specified value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_lt:
- If the blob's sequence number is less than the specified
- value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_eq:
- If the blob's sequence number is equal to the specified
- value, the request proceeds; otherwise it fails.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword str encoding:
- Defaults to UTF-8.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _upload_page_options(
- page=page,
- offset=offset,
- length=length,
- **kwargs)
- try:
- return cast(Dict[str, Any], self._client.page_blob.upload_pages(**options))
- except HttpResponseError as error:
- process_storage_error(error)
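
Reviewer note: a minimal sketch of the 512-byte alignment rule in practice:

# Write one 512-byte page at the start of the blob; both offset and length
# must be multiples of 512, per the docstring above.
page_blob.upload_page(b"\x00" * 512, offset=0, length=512)
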
-
- @distributed_trace
- def upload_pages_from_url(
- self, source_url: str,
- offset: int,
- length: int,
- source_offset: int,
- **kwargs: Any
- ) -> Dict[str, Any]:
- """
- The Upload Pages operation writes a range of pages to a page blob where
- the contents are read from a URL.
-
- :param str source_url:
- The URL of the source data. It can point to any Azure Blob or File, that is either public or has a
- shared access signature attached.
- :param int offset:
- Start of byte range to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int length:
- Number of bytes to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int source_offset:
- This indicates the start of the range of bytes (inclusive) that has to be taken from the copy source.
- The service will read the same number of bytes as the destination range (length-offset).
- :keyword bytes source_content_md5:
- If given, the service will calculate the MD5 hash of the block content and compare against this value.
- :keyword ~datetime.datetime source_if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the source resource has been modified since the specified time.
- :keyword ~datetime.datetime source_if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the source resource has not been modified since the specified date/time.
- :keyword str source_etag:
- The source ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions source_match_condition:
- The source match condition to use upon the etag.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int if_sequence_number_lte:
- If the blob's sequence number is less than or equal to
- the specified value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_lt:
- If the blob's sequence number is less than the specified
- value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_eq:
- If the blob's sequence number is equal to the specified
- value, the request proceeds; otherwise it fails.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- The destination ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The destination match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword str source_authorization:
- Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
- the prefix of the source_authorization string.
- :returns: Response after uploading pages from specified URL.
- :rtype: Dict[str, Any]
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _upload_pages_from_url_options(
- source_url=source_url,
- offset=offset,
- length=length,
- source_offset=source_offset,
- **kwargs
- )
- try:
- return cast(Dict[str, Any], self._client.page_blob.upload_pages_from_url(**options))
- except HttpResponseError as error:
- process_storage_error(error)
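
Reviewer note: a hedged sketch of a server-side page copy; the source URL (account, container, SAS) is a placeholder and must be readable by the service, per the docstring:

page_blob.upload_pages_from_url(
    source_url="https://<account>.blob.core.windows.net/src/source-page-blob?<sas>",  # placeholder
    offset=0,        # destination range start, 512-aligned
    length=512,      # destination range length, 512-aligned
    source_offset=0, # the service reads the same number of bytes from here
)
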
-
- @distributed_trace
- def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
- """Clears a range of pages.
-
- :param int offset:
- Start of byte range to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :param int length:
- Number of bytes to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a modulus of 512 and the length must be a modulus of
- 512.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int if_sequence_number_lte:
- If the blob's sequence number is less than or equal to
- the specified value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_lt:
- If the blob's sequence number is less than the specified
- value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_eq:
- If the blob's sequence number is equal to the specified
- value, the request proceeds; otherwise it fails.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _clear_page_options(
- offset=offset,
- length=length,
- **kwargs
- )
- try:
- return cast(Dict[str, Any], self._client.page_blob.clear_pages(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def append_block(
- self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
- length: Optional[int] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime, int]]:
- """Commits a new block of data to the end of the existing append blob.
-
- :param data:
- Content of the block. This can be bytes, text, an iterable or a file-like object.
- :type data: bytes or str or Iterable
- :param int length:
- Size of the block in bytes.
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the block content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default),
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword int maxsize_condition:
- Optional conditional header. The max length in bytes permitted for
- the append blob. If the Append Block operation would cause the blob
- to exceed that limit or if the blob size is already greater than the
- value specified in this header, the request will fail with
- MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
- :keyword int appendpos_condition:
- Optional conditional header, used only for the Append Block operation.
- A number indicating the byte offset to compare. Append Block will
- succeed only if the append position is equal to this number. If it
- is not, the request will fail with the AppendPositionConditionNotMet error
- (HTTP status code 412 - Precondition Failed).
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword str encoding:
- Defaults to UTF-8.
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _append_block_options(
- data=data,
- length=length,
- **kwargs
- )
- try:
- return cast(Dict[str, Any], self._client.append_blob.append_block(**options))
- except HttpResponseError as error:
- process_storage_error(error)
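
Reviewer note: a hedged sketch of appending to a log-style blob; the client setup and names are placeholders:

from azure.storage.blob import BlobClient

append_blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "app.log")  # placeholders
append_blob.create_append_blob()  # create (or recreate) the append blob first

result = append_blob.append_block(b"log line 1\n")
# Per the docstring, the returned dict includes Etag, last modified,
# the new append offset, and the committed block count.
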
-
- @distributed_trace
- def append_block_from_url(
- self, copy_source_url: str,
- source_offset: Optional[int] = None,
- source_length: Optional[int] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime, int]]:
- """
- Creates a new block to be committed as part of a blob, where the contents are read from a source url.
-
- :param str copy_source_url:
- The URL of the source data. It can point to any Azure Blob or File, that is either public or has a
- shared access signature attached.
- :param int source_offset:
- This indicates the start of the range of bytes (inclusive) that has to be taken from the copy source.
- :param int source_length:
- This indicates the end of the range of bytes that has to be taken from the copy source.
- :keyword bytearray source_content_md5:
- If given, the service will calculate the MD5 hash of the block content and compare against this value.
- :keyword int maxsize_condition:
- Optional conditional header. The max length in bytes permitted for
- the append blob. If the Append Block operation would cause the blob
- to exceed that limit or if the blob size is already greater than the
- value specified in this header, the request will fail with
- MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
- :keyword int appendpos_condition:
- Optional conditional header, used only for the Append Block operation.
- A number indicating the byte offset to compare. Append Block will
- succeed only if the append position is equal to this number. If it
- is not, the request will fail with the
- AppendPositionConditionNotMet error
- (HTTP status code 412 - Precondition Failed).
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- The destination ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The destination match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~datetime.datetime source_if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the source resource has been modified since the specified time.
- :keyword ~datetime.datetime source_if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the source resource has not been modified since the specified date/time.
- :keyword str source_etag:
- The source ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions source_match_condition:
- The source match condition to use upon the etag.
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword str source_authorization:
- Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
- the prefix of the source_authorization string.
- :returns: Result after appending a new block.
- :rtype: Dict[str, Union[str, datetime, int]]
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _append_block_from_url_options(
- copy_source_url=copy_source_url,
- source_offset=source_offset,
- source_length=source_length,
- **kwargs
- )
- try:
- return cast(Dict[str, Union[str, datetime, int]],
- self._client.append_blob.append_block_from_url(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]:
- """The Seal operation seals the Append Blob to make it read-only.
-
- .. versionadded:: 12.4.0
-
- :keyword int appendpos_condition:
- Optional conditional header, used only for the Append Block operation.
- A number indicating the byte offset to compare. Append Block will
- succeed only if the append position is equal to this number. If it
- is not, the request will fail with the AppendPositionConditionNotMet error
- (HTTP status code 412 - Precondition Failed).
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- options = _seal_append_blob_options(**kwargs)
- try:
- return cast(Dict[str, Any], self._client.append_blob.seal(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def _get_container_client(self) -> "ContainerClient":
- """Get a client to interact with the blob's parent container.
-
- The container need not already exist. Defaults to current blob's credentials.
-
- :returns: A ContainerClient.
- :rtype: ~azure.storage.blob.ContainerClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START get_container_client_from_blob_client]
- :end-before: [END get_container_client_from_blob_client]
- :language: python
- :dedent: 8
- :caption: Get container client from blob object.
- """
- from ._container_client import ContainerClient
- if not isinstance(self._pipeline._transport, TransportWrapper):  # pylint: disable = protected-access
- _pipeline = Pipeline(
- transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
- policies=self._pipeline._impl_policies  # pylint: disable = protected-access
- )
- else:
- _pipeline = self._pipeline  # pylint: disable = protected-access
- return ContainerClient(
- f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name,
- credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
- _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
- require_encryption=self.require_encryption, encryption_version=self.encryption_version,
- key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function)
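
Reviewer note: sealing is irreversible for the blob's append behavior; a minimal sketch with the placeholder client from the earlier examples:

append_blob.seal_append_blob()
# After sealing, the blob is read-only: further append_block calls are rejected
# by the service, though the existing content remains readable.
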
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py
deleted file mode 100644
index 6451762cfb4f..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py
+++ /dev/null
@@ -1,1242 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-# pylint: disable=too-many-lines
-
-from io import BytesIO
-from typing import (
- Any, AnyStr, AsyncGenerator, AsyncIterable, cast,
- Dict, IO, Iterable, List, Optional, Tuple, Union,
- TYPE_CHECKING
-)
-from urllib.parse import quote, unquote, urlparse
-
-from ._deserialize import deserialize_blob_stream
-from ._encryption import modify_user_agent_for_encryption, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION
-from ._generated.models import (
- AppendPositionAccessConditions,
- BlobHTTPHeaders,
- BlockList,
- BlockLookupList,
- CpkInfo,
- DeleteSnapshotsOptionType,
- QueryRequest,
- SequenceNumberAccessConditions
-)
-from ._models import (
- BlobBlock,
- BlobProperties,
- BlobType,
- DelimitedJsonDialect,
- DelimitedTextDialect,
- PremiumPageBlobTier,
- QuickQueryDialect
-)
-from ._serialize import (
- get_access_conditions,
- get_cpk_scope_info,
- get_modify_conditions,
- get_source_conditions,
- serialize_blob_tags_header,
- serialize_blob_tags,
- serialize_query_format
-)
-from ._shared import encode_base64
-from ._shared.base_client import parse_query
-from ._shared.request_handlers import (
- add_metadata_headers,
- get_length,
- read_length,
- validate_and_format_range_headers
-)
-from ._shared.response_handlers import return_headers_and_deserialized, return_response_headers
-from ._shared.uploads import IterStreamer
-from ._shared.uploads_async import AsyncIterStreamer
-from ._upload_helpers import _any_conditions
-
-if TYPE_CHECKING:
- from urllib.parse import ParseResult
- from ._generated import AzureBlobStorage
- from ._models import ContentSettings
- from ._shared.models import StorageConfiguration
-
-
-def _parse_url(
- account_url: str,
- container_name: str,
- blob_name: str
-) -> Tuple["ParseResult", Optional[str], Optional[str]]:
- try:
- if not account_url.lower().startswith('http'):
- account_url = "https://" + account_url
- except AttributeError as exc:
- raise ValueError("Account URL must be a string.") from exc
- parsed_url = urlparse(account_url.rstrip('/'))
-
- if not (container_name and blob_name):
- raise ValueError("Please specify a container name and blob name.")
- if not parsed_url.netloc:
- raise ValueError(f"Invalid URL: {account_url}")
-
- path_snapshot, sas_token = parse_query(parsed_url.query)
-
- return parsed_url, sas_token, path_snapshot
-
-def _format_url(container_name: Union[bytes, str], scheme: str, blob_name: str, query_str: str, hostname: str) -> str:
- if isinstance(container_name, str):
- container_name = container_name.encode('UTF-8')
- return f"{scheme}://{hostname}/{quote(container_name)}/{quote(blob_name, safe='~/')}{query_str}"
-
-def _encode_source_url(source_url: str) -> str:
- parsed_source_url = urlparse(source_url)
- source_scheme = parsed_source_url.scheme
- source_hostname = parsed_source_url.netloc.rstrip('/')
- source_path = unquote(parsed_source_url.path)
- source_query = parsed_source_url.query
- result = [f"{source_scheme}://{source_hostname}{quote(source_path, safe='~/')}"]
- if source_query:
- result.append(source_query)
- return '?'.join(result)
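
Reviewer note: these underscore-prefixed helpers were module-internal, so the calls below are illustrative only, showing the normalization the removed code performed:

# _parse_url prepends "https://" when no scheme is given and splits off any
# SAS token and snapshot from the query string.
parsed, sas_token, snapshot = _parse_url(
    account_url="account.blob.core.windows.net",  # placeholder account
    container_name="src",
    blob_name="data.txt",
)
print(parsed.scheme)  # "https"

# _encode_source_url re-encodes a copy-source path so reserved characters
# survive the round trip:
print(_encode_source_url("https://account.blob.core.windows.net/src/name with spaces.txt"))
# -> https://account.blob.core.windows.net/src/name%20with%20spaces.txt
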
= kwargs.pop('encoding', 'UTF-8') - if isinstance(data, str): - data = data.encode(encoding) - if length is None: - length = get_length(data) - if isinstance(data, bytes): - data = data[:length] - - stream: Optional[Any] = None - if isinstance(data, bytes): - stream = BytesIO(data) - elif hasattr(data, 'read'): - stream = data - elif hasattr(data, '__iter__') and not isinstance(data, (list, tuple, set, dict)): - stream = IterStreamer(data, encoding=encoding) - elif hasattr(data, '__aiter__'): - stream = AsyncIterStreamer(cast(AsyncGenerator, data), encoding=encoding) - else: - raise TypeError(f"Unsupported data type: {type(data)}") - - validate_content = kwargs.pop('validate_content', False) - content_settings = kwargs.pop('content_settings', None) - overwrite = kwargs.pop('overwrite', False) - max_concurrency = kwargs.pop('max_concurrency', 1) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - kwargs['cpk_info'] = cpk_info - - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) - kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) - kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) - if content_settings: - kwargs['blob_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=content_settings.content_md5, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) - kwargs['stream'] = stream - kwargs['length'] = length - kwargs['overwrite'] = overwrite - kwargs['headers'] = headers - kwargs['validate_content'] = validate_content - kwargs['blob_settings'] = config - kwargs['max_concurrency'] = max_concurrency - kwargs['encryption_options'] = encryption_options - # Add feature flag to user agent for encryption - if encryption_options['key']: - modify_user_agent_for_encryption( - config.user_agent_policy.user_agent, - sdk_moniker, - encryption_options['version'], - kwargs) - - if blob_type == BlobType.BlockBlob: - kwargs['client'] = client.block_blob - elif blob_type == BlobType.PageBlob: - if (encryption_options['version'] == '2.0' and - (encryption_options['required'] or encryption_options['key'] is not None)): - raise ValueError("Encryption version 2.0 does not currently support page blobs.") - kwargs['client'] = client.page_blob - elif blob_type == BlobType.AppendBlob: - if encryption_options['required'] or (encryption_options['key'] is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - kwargs['client'] = client.append_blob - else: - raise ValueError(f"Unsupported BlobType: {blob_type}") - return kwargs - -def _upload_blob_from_url_options(source_url: str, **kwargs: Any ) -> Dict[str, Any]: - source_url = _encode_source_url(source_url=source_url) - tier = kwargs.pop('standard_blob_tier', None) - overwrite = kwargs.pop('overwrite', False) - content_settings = kwargs.pop('content_settings', None) - source_authorization = kwargs.pop('source_authorization', None) - if content_settings: - kwargs['blob_http_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - 
blob_content_type=content_settings.content_type, - blob_content_md5=None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'copy_source_authorization': source_authorization, - 'content_length': 0, - 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), - 'source_content_md5': kwargs.pop('source_content_md5', None), - 'copy_source': source_url, - 'modified_access_conditions': get_modify_conditions(kwargs), - 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), - 'cls': return_response_headers, - 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), - 'tier': tier.value if tier else None, - 'source_modified_access_conditions': get_source_conditions(kwargs), - 'cpk_info': cpk_info, - 'cpk_scope_info': get_cpk_scope_info(kwargs) - } - options.update(kwargs) - if not overwrite and not _any_conditions(**options): # pylint: disable=protected-access - options['modified_access_conditions'].if_none_match = '*' - return options - -def _download_blob_options( - blob_name: str, - container_name: str, - version_id: Optional[str], - offset: Optional[int], - length: Optional[int], - encoding: Optional[str], - encryption_options: Dict[str, Any], - config: "StorageConfiguration", - sdk_moniker: str, - client: "AzureBlobStorage", - **kwargs -) -> Dict[str, Any]: - """Creates a dictionary containing the options for a download blob operation. - - :param str blob_name: - The name of the blob. - :param str container_name: - The name of the container. - :param Optional[str] version_id: - The version id parameter is a value that, when present, specifies the version of the blob to download. - :param Optional[int] offset: - Start of byte range to use for downloading a section of the blob. Must be set if length is provided. - :param Optional[int] length: - Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. - :param Optional[str] encoding: - Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. - :param Dict[str, Any] encryption_options: - The options for encryption, if enabled. - :param StorageConfiguration config: - The Storage configuration options. - :param str sdk_moniker: - The string representing the SDK package version. - :param AzureBlobStorage client: - The generated Blob Storage client. - :returns: A dictionary containing the download blob options. 
-    :rtype: Dict[str, Any]
-    """
-    if length is not None:
-        if offset is None:
-            raise ValueError("Offset must be provided if length is provided.")
-        length = offset + length - 1  # Service actually uses an end-range inclusive index
-
-    validate_content = kwargs.pop('validate_content', False)
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                           encryption_algorithm=cpk.algorithm)
-
-    # Add feature flag to user agent for encryption
-    if encryption_options['key'] or encryption_options['resolver']:
-        modify_user_agent_for_encryption(
-            config.user_agent_policy.user_agent,
-            sdk_moniker,
-            encryption_options['version'],
-            kwargs)
-
-    options = {
-        'clients': client,
-        'config': config,
-        'start_range': offset,
-        'end_range': length,
-        'version_id': version_id,
-        'validate_content': validate_content,
-        'encryption_options': {
-            'required': encryption_options['required'],
-            'key': encryption_options['key'],
-            'resolver': encryption_options['resolver']},
-        'lease_access_conditions': access_conditions,
-        'modified_access_conditions': mod_conditions,
-        'cpk_info': cpk_info,
-        'download_cls': kwargs.pop('cls', None) or deserialize_blob_stream,
-        'max_concurrency': kwargs.pop('max_concurrency', 1),
-        'encoding': encoding,
-        'timeout': kwargs.pop('timeout', None),
-        'name': blob_name,
-        'container': container_name}
-    options.update(kwargs)
-    return options
-
-def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwargs: Any) -> Tuple[Dict[str, Any], str]:
-    delimiter = '\n'
-    input_format = kwargs.pop('blob_format', None)
-    if input_format == QuickQueryDialect.DelimitedJson:
-        input_format = DelimitedJsonDialect()
-    if input_format == QuickQueryDialect.DelimitedText:
-        input_format = DelimitedTextDialect()
-    input_parquet_format = input_format == "ParquetDialect"
-    if input_format and not input_parquet_format:
-        try:
-            delimiter = input_format.lineterminator
-        except AttributeError:
-            try:
-                delimiter = input_format.delimiter
-            except AttributeError as exc:
-                raise ValueError("The type of blob_format can only be DelimitedTextDialect or "
-                                 "DelimitedJsonDialect or ParquetDialect") from exc
-    output_format = kwargs.pop('output_format', None)
-    if output_format == QuickQueryDialect.DelimitedJson:
-        output_format = DelimitedJsonDialect()
-    if output_format == QuickQueryDialect.DelimitedText:
-        output_format = DelimitedTextDialect()
-    if output_format:
-        if output_format == "ParquetDialect":
-            raise ValueError("ParquetDialect is invalid as an output format.")
-        try:
-            delimiter = output_format.lineterminator
-        except AttributeError:
-            try:
-                delimiter = output_format.delimiter
-            except AttributeError:
-                pass
-    else:
-        output_format = input_format if not input_parquet_format else None
-    query_request = QueryRequest(
-        expression=query_expression,
-        input_serialization=serialize_query_format(input_format),
-        output_serialization=serialize_query_format(output_format)
-    )
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(
-            encryption_key=cpk.key_value,
-            encryption_key_sha256=cpk.key_hash,
-            encryption_algorithm=cpk.algorithm
-        )
-    options = {
-        'query_request': query_request,
-        'lease_access_conditions': access_conditions,
-        
'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'snapshot': snapshot, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized, - } - options.update(kwargs) - return options, delimiter - -def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if delete_snapshots: - delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) - options = { - 'timeout': kwargs.pop('timeout', None), - 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs - 'delete_snapshots': delete_snapshots or None, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions} - options.update(kwargs) - return options - -def _delete_blob_options( - snapshot: Optional[str], - version_id: Optional[str], - delete_snapshots: Optional[str] = None, - **kwargs: Any -) -> Dict[str, Any]: - if snapshot and delete_snapshots: - raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") - options = _generic_delete_blob_options(delete_snapshots, **kwargs) - options['snapshot'] = snapshot - options['version_id'] = version_id - options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) - return options - -def _set_http_headers_options(content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=content_settings.content_md5, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - options = { - 'timeout': kwargs.pop('timeout', None), - 'blob_http_headers': blob_headers, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any): - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - -def _create_page_blob_options( - size: int, - content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, - **kwargs: Any -) -> Dict[str, Any]: - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=content_settings.content_md5, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - sequence_number = kwargs.pop('sequence_number', None) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - immutability_policy = kwargs.pop('immutability_policy', None) - if immutability_policy: - kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time - kwargs['immutability_policy_mode'] = immutability_policy.policy_mode - - tier = None - if premium_page_blob_tier: - try: - tier = premium_page_blob_tier.value # type: ignore - except AttributeError: - tier = premium_page_blob_tier # type: ignore - - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_content_length': size, - 'blob_sequence_number': sequence_number, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - "tier": tier, - 'headers': headers} - options.update(kwargs) - return options - -def _create_append_blob_options( - content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any -) -> Dict[str, Any]: - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=content_settings.content_md5, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - immutability_policy = kwargs.pop('immutability_policy', None) - if immutability_policy: - kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time - kwargs['immutability_policy_mode'] = immutability_policy.policy_mode - - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - 
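The option builders above and below all share one shape: public keyword arguments are flattened into the parameter dictionary of a generated-client operation. A minimal usage sketch of the public API that `_create_page_blob_options` backs (the connection string, container, and blob names are placeholders):

# Sketch only: placeholder connection string, container, and blob names.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="pages", blob_name="disk.vhd"
)
# size must be a multiple of 512; extra keywords such as sequence_number or
# tags travel through **kwargs into the dict built by _create_page_blob_options.
blob.create_page_blob(size=512 * 1024, sequence_number=0)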
-def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]:
-    headers = kwargs.pop('headers', {})
-    headers.update(add_metadata_headers(metadata))
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-    cpk_scope_info = get_cpk_scope_info(kwargs)
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                           encryption_algorithm=cpk.algorithm)
-
-    options = {
-        'timeout': kwargs.pop('timeout', None),
-        'lease_access_conditions': access_conditions,
-        'modified_access_conditions': mod_conditions,
-        'cpk_scope_info': cpk_scope_info,
-        'cpk_info': cpk_info,
-        'cls': return_response_headers,
-        'headers': headers}
-    options.update(kwargs)
-    return options
-
-def _start_copy_from_url_options(  # pylint:disable=too-many-statements
-    source_url: str,
-    metadata: Optional[Dict[str, str]] = None,
-    incremental_copy: bool = False,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    source_url = _encode_source_url(source_url=source_url)
-    headers = kwargs.pop('headers', {})
-    headers.update(add_metadata_headers(metadata))
-    if 'source_lease' in kwargs:
-        source_lease = kwargs.pop('source_lease')
-        try:
-            headers['x-ms-source-lease-id'] = source_lease.id
-        except AttributeError:
-            headers['x-ms-source-lease-id'] = source_lease
-
-    tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None)
-    tags = kwargs.pop('tags', None)
-
-    # Options only available for sync copy
-    requires_sync = kwargs.pop('requires_sync', None)
-    encryption_scope_str = kwargs.pop('encryption_scope', None)
-    source_authorization = kwargs.pop('source_authorization', None)
-    # If tags is a str, interpret that as copy_source_tags
-    copy_source_tags = isinstance(tags, str)
-
-    if incremental_copy:
-        if source_authorization:
-            raise ValueError("Source authorization tokens are not applicable for incremental copying.")
-        if copy_source_tags:
-            raise ValueError("Copying source tags is not applicable for incremental copying.")
-
-    # TODO: refactor start_copy_from_url API in _blob_client.py. Call _generated/_blob_operations.py copy_from_url
-    #  when requires_sync=True is set.
-    #  Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url.
-    #  As sync copy diverges more from async copy, more problems will surface.
- if requires_sync is True: - headers['x-ms-requires-sync'] = str(requires_sync) - if encryption_scope_str: - headers['x-ms-encryption-scope'] = encryption_scope_str - if source_authorization: - headers['x-ms-copy-source-authorization'] = source_authorization - if copy_source_tags: - headers['x-ms-copy-source-tag-option'] = tags - else: - if encryption_scope_str: - raise ValueError( - "Encryption_scope is only supported for sync copy, please specify requires_sync=True") - if source_authorization: - raise ValueError( - "Source authorization tokens are only supported for sync copy, please specify requires_sync=True") - if copy_source_tags: - raise ValueError( - "Copying source tags is only supported for sync copy, please specify requires_sync=True") - - timeout = kwargs.pop('timeout', None) - dest_mod_conditions = get_modify_conditions(kwargs) - blob_tags_string = serialize_blob_tags_header(tags) if not copy_source_tags else None - - immutability_policy = kwargs.pop('immutability_policy', None) - if immutability_policy: - kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time - kwargs['immutability_policy_mode'] = immutability_policy.policy_mode - - options = { - 'copy_source': source_url, - 'seal_blob': kwargs.pop('seal_destination_blob', None), - 'timeout': timeout, - 'modified_access_conditions': dest_mod_conditions, - 'blob_tags_string': blob_tags_string, - 'headers': headers, - 'cls': return_response_headers, - } - if not incremental_copy: - source_mod_conditions = get_source_conditions(kwargs) - dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) - options['source_modified_access_conditions'] = source_mod_conditions - options['lease_access_conditions'] = dest_access_conditions - options['tier'] = tier.value if tier else None - options.update(kwargs) - return options - -def _abort_copy_options(copy_id: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - if isinstance(copy_id, BlobProperties): - copy_id = copy_id.copy.id # type: ignore [assignment] - elif isinstance(copy_id, dict): - copy_id = copy_id['copy_id'] - options = { - 'copy_id': copy_id, - 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) - return options - -def _stage_block_options( - block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], - length: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - block_id = encode_base64(str(block_id)) - if isinstance(data, str): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - if length is None: - length = get_length(data) - if length is None: - length, data = read_length(data) - if isinstance(data, bytes): - data = data[:length] - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'block_id': block_id, - 'content_length': length, - 'body': data, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - 
options.update(kwargs) - return options - -def _stage_block_from_url_options( - block_id: str, - source_url: str, - source_offset: Optional[int] = None, - source_length: Optional[int] = None, - source_content_md5: Optional[Union[bytes, bytearray]] = None, - **kwargs: Any -) -> Dict[str, Any]: - source_url = _encode_source_url(source_url=source_url) - source_authorization = kwargs.pop('source_authorization', None) - if source_length is not None and source_offset is None: - raise ValueError("Source offset value must not be None if length is set.") - if source_length is not None and source_offset is not None: - source_length = source_offset + source_length - 1 - block_id = encode_base64(str(block_id)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - range_header = None - if source_offset is not None: - range_header, _ = validate_and_format_range_headers(source_offset, source_length) - - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'copy_source_authorization': source_authorization, - 'block_id': block_id, - 'content_length': 0, - 'source_url': source_url, - 'source_range': range_header, - 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - options.update(kwargs) - return options - -def _get_block_list_result(blocks: BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]]: - committed = [] - uncommitted = [] - if blocks.committed_blocks: - committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access - if blocks.uncommitted_blocks: - uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access - return committed, uncommitted - -def _commit_block_list_options( - block_list: List[BlobBlock], - content_settings: Optional["ContentSettings"] = None, - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any -) -> Dict[str, Any]: - block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) - for block in block_list: - if isinstance(block, BlobBlock): - if block.state.value == 'committed': - cast(List[str], block_lookup.committed).append(encode_base64(str(block.id))) - elif block.state.value == 'uncommitted': - cast(List[str], block_lookup.uncommitted).append(encode_base64(str(block.id))) - elif block_lookup.latest is not None: - block_lookup.latest.append(encode_base64(str(block.id))) - else: - block_lookup.latest.append(encode_base64(str(block))) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - blob_headers = None - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=content_settings.content_md5, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = 
get_cpk_scope_info(kwargs)
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                           encryption_algorithm=cpk.algorithm)
-
-    immutability_policy = kwargs.pop('immutability_policy', None)
-    if immutability_policy:
-        kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time
-        kwargs['immutability_policy_mode'] = immutability_policy.policy_mode
-
-    tier = kwargs.pop('standard_blob_tier', None)
-    blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None))
-
-    options = {
-        'blocks': block_lookup,
-        'blob_http_headers': blob_headers,
-        'lease_access_conditions': access_conditions,
-        'timeout': kwargs.pop('timeout', None),
-        'modified_access_conditions': mod_conditions,
-        'cls': return_response_headers,
-        'validate_content': validate_content,
-        'cpk_scope_info': cpk_scope_info,
-        'cpk_info': cpk_info,
-        'tier': tier.value if tier else None,
-        'blob_tags_string': blob_tags_string,
-        'headers': headers
-    }
-    options.update(kwargs)
-    return options
-
-def _set_blob_tags_options(
-    version_id: Optional[str],
-    tags: Optional[Dict[str, str]] = None,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    serialized_tags = serialize_blob_tags(tags)
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-
-    options = {
-        'tags': serialized_tags,
-        'lease_access_conditions': access_conditions,
-        'modified_access_conditions': mod_conditions,
-        'version_id': version_id,
-        'cls': return_response_headers}
-    options.update(kwargs)
-    return options
-
-def _get_blob_tags_options(version_id: Optional[str], snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]:
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-
-    options = {
-        'version_id': version_id,
-        'snapshot': snapshot,
-        'lease_access_conditions': access_conditions,
-        'modified_access_conditions': mod_conditions,
-        'timeout': kwargs.pop('timeout', None),
-        'cls': return_headers_and_deserialized}
-    return options
-
-def _get_page_ranges_options(
-    snapshot: Optional[str],
-    offset: Optional[int] = None,
-    length: Optional[int] = None,
-    previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-    if length is not None and offset is None:
-        raise ValueError("Offset value must not be None if length is set.")
-    if length is not None and offset is not None:
-        length = offset + length - 1  # Reformat to an inclusive range index
-    page_range, _ = validate_and_format_range_headers(
-        offset, length, start_range_required=False, end_range_required=False, align_to_page=True
-    )
-    options = {
-        'snapshot': snapshot,
-        'lease_access_conditions': access_conditions,
-        'modified_access_conditions': mod_conditions,
-        'timeout': kwargs.pop('timeout', None),
-        'range': page_range}
-    if previous_snapshot_diff:
-        try:
-            options['prevsnapshot'] = previous_snapshot_diff.snapshot  # type: ignore
-        except AttributeError:
-            try:
-                options['prevsnapshot'] = previous_snapshot_diff['snapshot']  # type: ignore
-            except TypeError:
-                options['prevsnapshot'] = previous_snapshot_diff
-    options.update(kwargs)
-    return options
-
-def _set_sequence_number_options(
-    sequence_number_action: str,
-    sequence_number: Optional[str] = None,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if sequence_number_action is None: - raise ValueError("A sequence number action must be specified") - options = { - 'sequence_number_action': sequence_number_action, - 'timeout': kwargs.pop('timeout', None), - 'blob_sequence_number': sequence_number, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _resize_blob_options(size: int, **kwargs: Any) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if size is None: - raise ValueError("A content length must be specified for a Page Blob.") - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'blob_content_length': size, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _upload_page_options( - page: bytes, - offset: int, - length: int, - **kwargs: Any -) -> Dict[str, Any]: - if isinstance(page, str): - page = page.encode(kwargs.pop('encoding', 'UTF-8')) - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - end_range = offset + length - 1 # Reformat to an inclusive range index - content_range = f'bytes={offset}-{end_range}' # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - validate_content = kwargs.pop('validate_content', False) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'body': page[:length], - 'content_length': length, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'range': content_range, - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _upload_pages_from_url_options( - source_url: str, - offset: int, - length: int, - source_offset: int, - **kwargs: Any -) -> Dict[str, Any]: - source_url = _encode_source_url(source_url=source_url) - # TODO: extract the code to a method format_range - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - if 
source_offset is None or source_offset % 512 != 0:
-        raise ValueError("source_offset must be an integer that aligns with 512 page size")
-
-    # Format range
-    end_range = offset + length - 1
-    destination_range = f'bytes={offset}-{end_range}'
-    source_range = f'bytes={source_offset}-{source_offset + length - 1}'  # Reformat to an inclusive range index
-
-    seq_conditions = SequenceNumberAccessConditions(
-        if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
-        if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
-        if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
-    )
-    source_authorization = kwargs.pop('source_authorization', None)
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    mod_conditions = get_modify_conditions(kwargs)
-    source_mod_conditions = get_source_conditions(kwargs)
-    cpk_scope_info = get_cpk_scope_info(kwargs)
-    source_content_md5 = kwargs.pop('source_content_md5', None)
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                           encryption_algorithm=cpk.algorithm)
-
-    options = {
-        'copy_source_authorization': source_authorization,
-        'source_url': source_url,
-        'content_length': 0,
-        'source_range': source_range,
-        'range': destination_range,
-        'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None,
-        'timeout': kwargs.pop('timeout', None),
-        'lease_access_conditions': access_conditions,
-        'sequence_number_access_conditions': seq_conditions,
-        'modified_access_conditions': mod_conditions,
-        'source_modified_access_conditions': source_mod_conditions,
-        'cpk_scope_info': cpk_scope_info,
-        'cpk_info': cpk_info,
-        'cls': return_response_headers}
-    options.update(kwargs)
-    return options
-
-def _clear_page_options(
-    offset: int,
-    length: int,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    access_conditions = get_access_conditions(kwargs.pop('lease', None))
-    seq_conditions = SequenceNumberAccessConditions(
-        if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
-        if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
-        if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
-    )
-    mod_conditions = get_modify_conditions(kwargs)
-    if offset is None or offset % 512 != 0:
-        raise ValueError("offset must be an integer that aligns with 512 page size")
-    if length is None or length % 512 != 0:
-        raise ValueError("length must be an integer that aligns with 512 page size")
-    end_range = length + offset - 1  # Reformat to an inclusive range index
-    content_range = f'bytes={offset}-{end_range}'
-
-    cpk = kwargs.pop('cpk', None)
-    cpk_info = None
-    if cpk:
-        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                           encryption_algorithm=cpk.algorithm)
-
-    options = {
-        'content_length': 0,
-        'timeout': kwargs.pop('timeout', None),
-        'range': content_range,
-        'lease_access_conditions': access_conditions,
-        'sequence_number_access_conditions': seq_conditions,
-        'modified_access_conditions': mod_conditions,
-        'cpk_info': cpk_info,
-        'cls': return_response_headers}
-    options.update(kwargs)
-    return options
-
-def _append_block_options(
-    data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
-    length: Optional[int] = None,
-    **kwargs: Any
-) -> Dict[str, Any]:
-    if isinstance(data, str):
-        data = data.encode(kwargs.pop('encoding', 'UTF-8'))
-    if length is None:
-        length = get_length(data)
-        if length is None:
-            length, data = 
read_length(data) - if length == 0: - return {} - if isinstance(data, bytes): - data = data[:length] - - appendpos_condition = kwargs.pop('appendpos_condition', None) - maxsize_condition = kwargs.pop('maxsize_condition', None) - validate_content = kwargs.pop('validate_content', False) - append_conditions = None - if maxsize_condition or appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - max_size=maxsize_condition, - append_position=appendpos_condition - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'body': data, - 'content_length': length, - 'timeout': kwargs.pop('timeout', None), - 'transactional_content_md5': None, - 'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _append_block_from_url_options( - copy_source_url: str, - source_offset: Optional[int] = None, - source_length: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - copy_source_url = _encode_source_url(source_url=copy_source_url) - # If end range is provided, start range must be provided - if source_length is not None and source_offset is None: - raise ValueError("source_offset should also be specified if source_length is specified") - # Format based on whether length is present - source_range = None - if source_length is not None and source_offset is not None: - end_range = source_offset + source_length - 1 - source_range = f'bytes={source_offset}-{end_range}' - elif source_offset is not None: - source_range = f"bytes={source_offset}-" - - appendpos_condition = kwargs.pop('appendpos_condition', None) - maxsize_condition = kwargs.pop('maxsize_condition', None) - source_content_md5 = kwargs.pop('source_content_md5', None) - append_conditions = None - if maxsize_condition or appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - max_size=maxsize_condition, - append_position=appendpos_condition - ) - source_authorization = kwargs.pop('source_authorization', None) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - source_mod_conditions = get_source_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'copy_source_authorization': source_authorization, - 'source_url': copy_source_url, - 'content_length': 0, - 'source_range': source_range, - 'source_content_md5': source_content_md5, - 'transactional_content_md5': None, - 'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'source_modified_access_conditions': source_mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'timeout': kwargs.pop('timeout', None)} - 
options.update(kwargs) - return options - -def _seal_append_blob_options(**kwargs: Any) -> Dict[str, Any]: - appendpos_condition = kwargs.pop('appendpos_condition', None) - append_conditions = None - if appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - append_position=appendpos_condition - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - -def _from_blob_url( - blob_url: str, - snapshot: Optional[Union[BlobProperties, str, Dict[str, Any]]] -) -> Tuple[str, str, str, Optional[str]]: - try: - if not blob_url.lower().startswith('http'): - blob_url = "https://" + blob_url - except AttributeError as exc: - raise ValueError("Blob URL must be a string.") from exc - parsed_url = urlparse(blob_url.rstrip('/')) - - if not parsed_url.netloc: - raise ValueError(f"Invalid URL: {blob_url}") - - account_path = "" - if ".core." in parsed_url.netloc: - # .core. is indicating non-customized url. Blob name with directory info can also be parsed. - path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=1) - elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: - path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=2) - account_path += '/' + path_blob[0] - else: - # for customized url. blob name that has directory info cannot be parsed. - path_blob = parsed_url.path.lstrip('/').split('/') - if len(path_blob) > 2: - account_path = "/" + "/".join(path_blob[:-2]) - - account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" - - msg_invalid_url = "Invalid URL. Provide a blob_url with a valid blob and container name." - if len(path_blob) <= 1: - raise ValueError(msg_invalid_url) - container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) - if not container_name or not blob_name: - raise ValueError(msg_invalid_url) - - path_snapshot, _ = parse_query(parsed_url.query) - if snapshot: - if isinstance(snapshot, BlobProperties): - path_snapshot = snapshot.snapshot - elif isinstance(snapshot, dict): - path_snapshot = snapshot['snapshot'] - else: - path_snapshot = snapshot - return (account_url, container_name, blob_name, path_snapshot) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py deleted file mode 100644 index 52a65c5cdd93..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py +++ /dev/null @@ -1,788 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -import functools -import warnings -from typing import ( - Any, Dict, List, Optional, Union, - TYPE_CHECKING -) -from typing_extensions import Self - -from azure.core.exceptions import HttpResponseError -from azure.core.paging import ItemPaged -from azure.core.pipeline import Pipeline -from azure.core.tracing.decorator import distributed_trace -from ._blob_client import BlobClient -from ._blob_service_client_helpers import _parse_url -from ._container_client import ContainerClient -from ._deserialize import service_properties_deserialize, service_stats_deserialize -from ._encryption import StorageEncryptionMixin -from ._generated import AzureBlobStorage -from ._generated.models import KeyInfo, StorageServiceProperties -from ._list_blobs_helper import FilteredBlobPaged -from ._models import BlobProperties, ContainerProperties, ContainerPropertiesPaged, CorsRule -from ._serialize import get_api_version -from ._shared.base_client import parse_connection_str, parse_query, StorageAccountHostsMixin, TransportWrapper -from ._shared.models import LocationMode -from ._shared.parser import _to_utc_datetime -from ._shared.response_handlers import ( - parse_to_internal_user_delegation_key, - process_storage_error, - return_response_headers -) - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential - from datetime import datetime - from ._lease import BlobLeaseClient - from ._models import ( - BlobAnalyticsLogging, - FilteredBlob, - Metrics, - PublicAccess, - RetentionPolicy, - StaticWebsite - ) - from ._shared.models import UserDelegationKey - - -class BlobServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): - """A client to interact with the Blob Service at the account level. - - This client provides operations to retrieve and configure the account properties - as well as list, create and delete containers within the account. - For operations relating to a specific container or blob, clients for those entities - can also be retrieved using the `get_client` functions. - - For more optional configuration, please click - `here `__. - - :param str account_url: - The URL to the blob storage account. Any other entities included - in the URL path (e.g. container or blob) will be discarded. This URL can be optionally - authenticated with a SAS token. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :keyword str api_version: - The Storage API version to use for requests. Default value is the most recent service version that is - compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. - - .. versionadded:: 12.2.0 - - :keyword str secondary_hostname: - The hostname of the secondary endpoint. 
-    :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
-        Defaults to 4*1024*1024, or 4MB.
-    :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob will be
-        uploaded with only one HTTP PUT request. If the blob size is larger than max_single_put_size,
-        the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
-    :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
-        algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
-    :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
-    :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
-    :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
-        the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
-    :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
-        or 4MB.
-    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
-        authentication. Only has an effect when credential is of type TokenCredential. The value could be
-        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
-
-    .. admonition:: Example:
-
-        .. literalinclude:: ../samples/blob_samples_authentication.py
-            :start-after: [START create_blob_service_client]
-            :end-before: [END create_blob_service_client]
-            :language: python
-            :dedent: 8
-            :caption: Creating the BlobServiceClient with account URL and credential.
-
-        .. literalinclude:: ../samples/blob_samples_authentication.py
-            :start-after: [START create_blob_service_client_oauth]
-            :end-before: [END create_blob_service_client_oauth]
-            :language: python
-            :dedent: 8
-            :caption: Creating the BlobServiceClient with Default Azure Identity credentials.
-    """
-
-    def __init__(
-        self, account_url: str,
-        credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
-        **kwargs: Any
-    ) -> None:
-        parsed_url, sas_token = _parse_url(account_url=account_url)
-        _, sas_token = parse_query(parsed_url.query)
-        self._query_str, credential = self._format_query_string(sas_token, credential)
-        super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
-        self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
-        self._client._config.version = get_api_version(kwargs)  # type: ignore [assignment]  # pylint: disable=protected-access
-        self._configure_encryption(kwargs)
-
-    def _format_url(self, hostname):
-        """Format the endpoint URL according to the current location
-        mode hostname.
-
-        :param str hostname:
-            The hostname of the current location mode.
-        :returns: A formatted endpoint URL including current location mode hostname.
-        :rtype: str
-        """
-        return f"{self.scheme}://{hostname}/{self._query_str}"
-
-    @classmethod
-    def from_connection_string(
-        cls, conn_str: str,
-        credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
-        **kwargs: Any
-    ) -> Self:
-        """Create BlobServiceClient from a Connection String.
-
-        :param str conn_str:
-            A connection string to an Azure Storage account.
-        :param credential:
-            The credentials with which to authenticate. This is optional if the
-            account URL already has a SAS token, or the connection string already has shared
-            access key values. The value can be a SAS token string,
-            an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
-            an account shared access key, or an instance of a TokenCredentials class from azure.identity.
-            Credentials provided here will take precedence over those in the connection string.
-            If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
-            should be the storage account key.
-        :type credential:
-            ~azure.core.credentials.AzureNamedKeyCredential or
-            ~azure.core.credentials.AzureSasCredential or
-            ~azure.core.credentials.TokenCredential or
-            str or dict[str, str] or None
-        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
-            authentication. Only has an effect when credential is of type TokenCredential. The value could be
-            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
-        :returns: A Blob service client.
-        :rtype: ~azure.storage.blob.BlobServiceClient
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_authentication.py
-                :start-after: [START auth_from_connection_string]
-                :end-before: [END auth_from_connection_string]
-                :language: python
-                :dedent: 8
-                :caption: Creating the BlobServiceClient from a connection string.
-        """
-        account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
-        if 'secondary_hostname' not in kwargs:
-            kwargs['secondary_hostname'] = secondary
-        return cls(account_url, credential=credential, **kwargs)
-
-    @distributed_trace
-    def get_user_delegation_key(
-        self, key_start_time: "datetime",
-        key_expiry_time: "datetime",
-        **kwargs: Any
-    ) -> "UserDelegationKey":
-        """
-        Obtain a user delegation key for the purpose of signing SAS tokens.
-        A token credential must be present on the service object for this request to succeed.
-
-        :param ~datetime.datetime key_start_time:
-            A DateTime value. Indicates when the key becomes valid.
-        :param ~datetime.datetime key_expiry_time:
-            A DateTime value. Indicates when the key stops being valid.
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: The user delegation key.
-        :rtype: ~azure.storage.blob.UserDelegationKey
-        """
-        key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time))
-        timeout = kwargs.pop('timeout', None)
-        try:
-            user_delegation_key = self._client.service.get_user_delegation_key(key_info=key_info,
-                                                                               timeout=timeout,
-                                                                               **kwargs)  # type: ignore
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-        return parse_to_internal_user_delegation_key(user_delegation_key)  # type: ignore
-
-    @distributed_trace
-    def get_account_information(self, **kwargs: Any) -> Dict[str, str]:
-        """Gets information related to the storage account.
-
-        The information can also be retrieved if the user has a SAS to a container or blob.
-        The keys in the returned dictionary include 'sku_name' and 'account_kind'.
-
-        :returns: A dict of account information (SKU and account type).
-        :rtype: dict(str, str)
-
-        .. 
admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_service.py
-                :start-after: [START get_blob_service_account_info]
-                :end-before: [END get_blob_service_account_info]
-                :language: python
-                :dedent: 8
-                :caption: Getting account information for the blob service.
-        """
-        try:
-            return self._client.service.get_account_info(cls=return_response_headers, **kwargs)  # type: ignore
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace
-    def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]:
-        """Retrieves statistics related to replication for the Blob service.
-
-        It is only available when read-access geo-redundant replication is enabled for
-        the storage account.
-
-        With geo-redundant replication, Azure Storage maintains your data durably
-        in two locations. In both locations, Azure Storage constantly maintains
-        multiple healthy replicas of your data. The location where you read,
-        create, update, or delete data is the primary storage account location.
-        The primary location exists in the region you choose at the time you
-        create an account via the Azure portal, for
-        example, North Central US. The location to which your data is replicated
-        is the secondary location. The secondary location is automatically
-        determined based on the location of the primary; it is in a second data
-        center that resides in the same region as the primary location. Read-only
-        access is available from the secondary location, if read-access geo-redundant
-        replication is enabled for your storage account.
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: The blob service stats.
-        :rtype: Dict[str, Any]
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_service.py
-                :start-after: [START get_blob_service_stats]
-                :end-before: [END get_blob_service_stats]
-                :language: python
-                :dedent: 8
-                :caption: Getting service stats for the blob service.
-        """
-        timeout = kwargs.pop('timeout', None)
-        try:
-            stats = self._client.service.get_statistics(  # type: ignore
-                timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs)
-            return service_stats_deserialize(stats)
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace
-    def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]:
-        """Gets the properties of a storage account's Blob service, including
-        Azure Storage Analytics.
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: An object containing blob service properties such as
-            analytics logging, hour/minute metrics, CORS rules, etc.
-        :rtype: Dict[str, Any]
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_service.py
-                :start-after: [START get_blob_service_properties]
-                :end-before: [END get_blob_service_properties]
-                :language: python
-                :dedent: 8
-                :caption: Getting service properties for the blob service.
- """ - timeout = kwargs.pop('timeout', None) - try: - service_props = self._client.service.get_properties(timeout=timeout, **kwargs) - return service_properties_deserialize(service_props) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def set_service_properties( - self, analytics_logging: Optional["BlobAnalyticsLogging"] = None, - hour_metrics: Optional["Metrics"] = None, - minute_metrics: Optional["Metrics"] = None, - cors: Optional[List[CorsRule]] = None, - target_version: Optional[str] = None, - delete_retention_policy: Optional["RetentionPolicy"] = None, - static_website: Optional["StaticWebsite"] = None, - **kwargs: Any - ) -> None: - """Sets the properties of a storage account's Blob service, including - Azure Storage Analytics. - - If an element (e.g. analytics_logging) is left as None, the - existing settings on the service for that functionality are preserved. - - :param analytics_logging: - Groups the Azure Analytics Logging settings. - :type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging - :param hour_metrics: - The hour metrics settings provide a summary of request - statistics grouped by API in hourly aggregates for blobs. - :type hour_metrics: ~azure.storage.blob.Metrics - :param minute_metrics: - The minute metrics settings provide request statistics - for each minute for blobs. - :type minute_metrics: ~azure.storage.blob.Metrics - :param cors: - You can include up to five CorsRule elements in the - list. If an empty list is specified, all CORS rules will be deleted, - and CORS will be disabled for the service. - :type cors: list[~azure.storage.blob.CorsRule] - :param str target_version: - Indicates the default version to use for requests if an incoming - request's version is not specified. - :param delete_retention_policy: - The delete retention policy specifies whether to retain deleted blobs. - It also specifies the number of days and versions of blob to keep. - :type delete_retention_policy: ~azure.storage.blob.RetentionPolicy - :param static_website: - Specifies whether the static website feature is enabled, - and if yes, indicates the index document and 404 error document to use. - :type static_website: ~azure.storage.blob.StaticWebsite - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service.py - :start-after: [START set_blob_service_properties] - :end-before: [END set_blob_service_properties] - :language: python - :dedent: 8 - :caption: Setting service properties for the blob service. 
- """ - if all(parameter is None for parameter in [ - analytics_logging, hour_metrics, minute_metrics, cors, - target_version, delete_retention_policy, static_website]): - raise ValueError("set_service_properties should be called with at least one parameter") - - props = StorageServiceProperties( - logging=analytics_logging, - hour_metrics=hour_metrics, - minute_metrics=minute_metrics, - cors=CorsRule._to_generated(cors), # pylint: disable=protected-access - default_service_version=target_version, - delete_retention_policy=delete_retention_policy, - static_website=static_website - ) - timeout = kwargs.pop('timeout', None) - try: - self._client.service.set_properties(props, timeout=timeout, **kwargs) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def list_containers( - self, name_starts_with: Optional[str] = None, - include_metadata: bool = False, - **kwargs: Any - ) -> ItemPaged[ContainerProperties]: - """Returns a generator to list the containers under the specified account. - - The generator will lazily follow the continuation tokens returned by - the service and stop when all containers have been returned. - - :param str name_starts_with: - Filters the results to return only containers whose names - begin with the specified prefix. - :param bool include_metadata: - Specifies that container metadata to be returned in the response. - The default value is `False`. - :keyword bool include_deleted: - Specifies that deleted containers to be returned in the response. This is for container restore enabled - account. The default value is `False`. - .. versionadded:: 12.4.0 - :keyword bool include_system: - Flag specifying that system containers should be included. - .. versionadded:: 12.10.0 - :keyword int results_per_page: - The maximum number of container names to retrieve per API - call. If the request does not specify the server will return up to 5,000 items. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) of ContainerProperties. - :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service.py - :start-after: [START bsc_list_containers] - :end-before: [END bsc_list_containers] - :language: python - :dedent: 12 - :caption: Listing the containers in the blob service. 
- """ - include = ['metadata'] if include_metadata else [] - include_deleted = kwargs.pop('include_deleted', None) - if include_deleted: - include.append("deleted") - include_system = kwargs.pop('include_system', None) - if include_system: - include.append("system") - - timeout = kwargs.pop('timeout', None) - results_per_page = kwargs.pop('results_per_page', None) - command = functools.partial( - self._client.service.list_containers_segment, - prefix=name_starts_with, - include=include, - timeout=timeout, - **kwargs) - return ItemPaged( - command, - prefix=name_starts_with, - results_per_page=results_per_page, - page_iterator_class=ContainerPropertiesPaged - ) - - @distributed_trace - def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> ItemPaged["FilteredBlob"]: - """The Filter Blobs operation enables callers to list blobs across all - containers whose tags match a given search expression. Filter blobs - searches across all containers within a storage account but can be - scoped within the expression to a single container. - - :param str filter_expression: - The expression to find blobs whose tags matches the specified condition. - eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" - To specify a container, eg. "@container='containerName' and \"Name\"='C'" - :keyword int results_per_page: - The max result per page when paginating. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) response of BlobProperties. - :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob] - """ - - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) - command = functools.partial( - self._client.service.filter_blobs, - where=filter_expression, - timeout=timeout, - **kwargs) - return ItemPaged( - command, results_per_page=results_per_page, - page_iterator_class=FilteredBlobPaged) - - @distributed_trace - def create_container( - self, name: str, - metadata: Optional[Dict[str, str]] = None, - public_access: Optional[Union["PublicAccess", str]] = None, - **kwargs: Any - ) -> ContainerClient: - """Creates a new container under the specified account. - - If the container with the same name already exists, a ResourceExistsError will - be raised. This method returns a client with which to interact with the newly - created container. - - :param str name: The name of the container to create. - :param metadata: - A dict with name-value pairs to associate with the - container as metadata. Example: `{'Category':'test'}` - :type metadata: dict(str, str) - :param public_access: - Possible values include: 'container', 'blob'. - :type public_access: str or ~azure.storage.blob.PublicAccess - :keyword container_encryption_scope: - Specifies the default encryption scope to set on the container and use for - all future writes. - - .. versionadded:: 12.2.0 - - :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. 
-
- @distributed_trace
- def create_container(
- self, name: str,
- metadata: Optional[Dict[str, str]] = None,
- public_access: Optional[Union["PublicAccess", str]] = None,
- **kwargs: Any
- ) -> ContainerClient:
- """Creates a new container under the specified account.
-
- If the container with the same name already exists, a ResourceExistsError will
- be raised. This method returns a client with which to interact with the newly
- created container.
-
- :param str name: The name of the container to create.
- :param metadata:
- A dict with name-value pairs to associate with the
- container as metadata. Example: `{'Category':'test'}`
- :type metadata: dict(str, str)
- :param public_access:
- Possible values include: 'container', 'blob'.
- :type public_access: str or ~azure.storage.blob.PublicAccess
- :keyword container_encryption_scope:
- Specifies the default encryption scope to set on the container and use for
- all future writes.
-
- .. versionadded:: 12.2.0
-
- :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A container client to interact with the newly created container.
- :rtype: ~azure.storage.blob.ContainerClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_service.py
- :start-after: [START bsc_create_container]
- :end-before: [END bsc_create_container]
- :language: python
- :dedent: 12
- :caption: Creating a container in the blob service.
- """
- container = self.get_container_client(name)
- kwargs.setdefault('merge_span', True)
- timeout = kwargs.pop('timeout', None)
- container.create_container(
- metadata=metadata, public_access=public_access, timeout=timeout, **kwargs)
- return container
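A sketch of the create-or-reuse pattern this method suggests; ResourceExistsError is the documented failure mode when the name is already taken:

    from azure.core.exceptions import ResourceExistsError

    try:
        container = service.create_container("mycontainer", metadata={"category": "test"})
    except ResourceExistsError:
        container = service.get_container_client("mycontainer")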
- """ - container_client = self.get_container_client(container) - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - container_client.delete_container( - lease=lease, - timeout=timeout, - **kwargs) - - @distributed_trace - def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: - """Renames a container. - - Operation is successful only if the source container exists. - - :param str name: - The name of the container to rename. - :param str new_name: - The new container name the user wants to rename to. - :keyword lease: - Specify this to perform only if the lease ID given - matches the active lease ID of the source container. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: A container client for the renamed container. - :rtype: ~azure.storage.blob.ContainerClient - """ - renamed_container = self.get_container_client(new_name) - lease = kwargs.pop('lease', None) - try: - kwargs['source_lease_id'] = lease.id - except AttributeError: - kwargs['source_lease_id'] = lease - try: - renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access - return renamed_container - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def undelete_container( - self, deleted_container_name: str, - deleted_container_version: str, - **kwargs: Any - ) -> ContainerClient: - """Restores soft-deleted container. - - Operation will only be successful if used within the specified number of days - set in the delete retention policy. - - .. versionadded:: 12.4.0 - This operation was introduced in API version '2019-12-12'. - - :param str deleted_container_name: - Specifies the name of the deleted container to restore. - :param str deleted_container_version: - Specifies the version of the deleted container to restore. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: The undeleted ContainerClient. - :rtype: ~azure.storage.blob.ContainerClient - """ - new_name = kwargs.pop('new_name', None) - if new_name: - warnings.warn("`new_name` is no longer supported.", DeprecationWarning) - container = self.get_container_client(new_name or deleted_container_name) - try: - container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access - deleted_container_version=deleted_container_version, - timeout=kwargs.pop('timeout', None), **kwargs) - return container - except HttpResponseError as error: - process_storage_error(error) - - def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient: - """Get a client to interact with the specified container. - - The container need not already exist. - - :param container: - The container. This can either be the name of the container, - or an instance of ContainerProperties. 
- :type container: str or ~azure.storage.blob.ContainerProperties - :returns: A ContainerClient. - :rtype: ~azure.storage.blob.ContainerClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service.py - :start-after: [START bsc_get_container_client] - :end-before: [END bsc_get_container_client] - :language: python - :dedent: 8 - :caption: Getting the container client to interact with a specific container. - """ - if isinstance(container, ContainerProperties): - container_name = container.name - else: - container_name = container - _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access - ) - return ContainerClient( - self.url, container_name=container_name, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) - - def get_blob_client( - self, container: Union[ContainerProperties, str], - blob: str, - snapshot: Optional[Union[Dict[str, Any], str]] = None, - *, - version_id: Optional[str] = None - ) -> BlobClient: - """Get a client to interact with the specified blob. - - The blob need not already exist. - - :param container: - The container that the blob is in. This can either be the name of the container, - or an instance of ContainerProperties. - :type container: str or ~azure.storage.blob.ContainerProperties - :param str blob: The name of the blob with which to interact. - :param snapshot: - The optional blob snapshot on which to operate. This can either be the ID of the snapshot, - or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`. - :type snapshot: str or dict(str, Any) - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :returns: A BlobClient. - :rtype: ~azure.storage.blob.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service.py - :start-after: [START bsc_get_blob_client] - :end-before: [END bsc_get_blob_client] - :language: python - :dedent: 12 - :caption: Getting the blob client to interact with a specific blob. - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. 
" + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_name = blob.name - else: - blob_name = blob - if isinstance(container, ContainerProperties): - container_name = container.name - else: - container_name = container - _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access - ) - return BlobClient( - self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, - version_id=version_id) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client_helpers.py deleted file mode 100644 index d2de950b7c83..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client_helpers.py +++ /dev/null @@ -1,27 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from typing import Any, Tuple, TYPE_CHECKING -from urllib.parse import urlparse -from ._shared.base_client import parse_query - -if TYPE_CHECKING: - from urllib.parse import ParseResult - - -def _parse_url(account_url: str) -> Tuple["ParseResult", Any]: - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError as exc: - raise ValueError("Account URL must be a string.") from exc - parsed_url = urlparse(account_url.rstrip('/')) - if not parsed_url.netloc: - raise ValueError(f"Invalid URL: {account_url}") - - _, sas_token = parse_query(parsed_url.query) - - return parsed_url, sas_token diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_client.py similarity index 72% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/_client.py index cabfed8f0666..c02b42eb20c5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_client.py @@ -2,20 +2,20 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any +from typing import Any, TYPE_CHECKING, Union from typing_extensions import Self from azure.core import PipelineClient +from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse -from . import models as _models -from ._configuration import AzureBlobStorageConfiguration +from ._configuration import BlobClientConfiguration from ._serialization import Deserializer, Serializer from .operations import ( AppendBlobOperations, @@ -26,9 +26,13 @@ ServiceOperations, ) +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential -class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword - """AzureBlobStorage. + +class BlobClient: # pylint: disable=client-accepts-api-version-keyword + """BlobClient. :ivar service: ServiceOperations operations :vartype service: azure.storage.blob.operations.ServiceOperations @@ -42,20 +46,18 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :vartype append_blob: azure.storage.blob.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the target of the - desired operation. Required. - :type url: str - :param base_url: Service URL. Required. Default value is "". - :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2024-08-04". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param endpoint: The host name of the blob storage account, e.g. + accountName.blob.core.windows.net. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a + AzureKeyCredential type or a TokenCredential type. Required. 
+ :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials.TokenCredential """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any - ) -> None: - self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = BlobClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -73,11 +75,10 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) @@ -86,13 +87,13 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = client._send_request(request) + >>> response = client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -105,7 +106,11 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_configuration.py new file mode 100644 index 000000000000..6f729172c8f4 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_configuration.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING, Union + +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class BlobClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BlobClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: The host name of the blob storage account, e.g. + accountName.blob.core.windows.net. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a + AzureKeyCredential type or a TokenCredential type. Required. + :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials.TokenCredential + """ + + def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None: + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "storage-blob/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _infer_policy(self, **kwargs): + if isinstance(self.credential, AzureKeyCredential): + return policies.AzureKeyCredentialPolicy(self.credential, "api-key", **kwargs) + if hasattr(self.credential, "get_token"): + return policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + raise TypeError(f"Unsupported credential: {self.credential}") + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = self._infer_policy(**kwargs) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py deleted file mode 100644 index 42433f419119..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ /dev/null @@ -1,1620 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft 
Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only - -import functools -import warnings -from datetime import datetime -from typing import ( - Any, AnyStr, cast, Dict, List, IO, Iterable, Iterator, Optional, overload, Union, - TYPE_CHECKING -) -from urllib.parse import unquote, urlparse -from typing_extensions import Self - -from azure.core.exceptions import HttpResponseError, ResourceNotFoundError -from azure.core.paging import ItemPaged -from azure.core.pipeline import Pipeline -from azure.core.tracing.decorator import distributed_trace -from ._blob_client import BlobClient -from ._container_client_helpers import ( - _format_url, - _generate_delete_blobs_options, - _generate_set_tiers_options, - _parse_url -) -from ._deserialize import deserialize_container_properties -from ._download import StorageStreamDownloader -from ._encryption import StorageEncryptionMixin -from ._generated import AzureBlobStorage -from ._generated.models import SignedIdentifier -from ._lease import BlobLeaseClient -from ._list_blobs_helper import ( - BlobNamesPaged, - BlobPrefix, - BlobPropertiesPaged, - FilteredBlobPaged, - IgnoreListBlobsDeserializer -) -from ._models import ( - BlobProperties, - BlobType, - ContainerProperties, - FilteredBlob -) -from ._serialize import get_access_conditions, get_api_version, get_container_cpk_scope_info, get_modify_conditions -from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper -from ._shared.request_handlers import add_metadata_headers, serialize_iso -from ._shared.response_handlers import ( - process_storage_error, - return_headers_and_deserialized, - return_response_headers -) - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential - from azure.core.pipeline.transport import HttpResponse # pylint: disable=C4756 - from azure.storage.blob import BlobServiceClient - from ._models import ( - AccessPolicy, - PremiumPageBlobTier, - PublicAccess, - StandardBlobTier - ) - - -class ContainerClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods - """A client to interact with a specific container, although that container - may not yet exist. - - For operations relating to a specific blob within this container, a blob client can be - retrieved using the :func:`~get_blob_client` function. - - For more optional configuration, please click - `here `__. - - :param str account_url: - The URI to the storage account. In order to create a client given the full URI to the container, - use the :func:`from_container_url` classmethod. - :param container_name: - The name of the container for the blob. - :type container_name: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
- If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
- should be the storage account key.
- :keyword str api_version:
- The Storage API version to use for requests. Default value is the most recent service version that is
- compatible with the current SDK. Setting to an older version may result in reduced feature compatibility.
-
- .. versionadded:: 12.2.0
-
- :keyword str secondary_hostname:
- The hostname of the secondary endpoint.
- :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
- Defaults to 4*1024*1024, or 4MB.
- :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob will be
- uploaded with only one http PUT request. If the blob size is larger than max_single_put_size,
- the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
- :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
- algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
- :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
- :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
- :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
- the exceeded part will be downloaded in chunks (potentially in parallel). Defaults to 32*1024*1024, or 32MB.
- :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
- or 4MB.
- :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
- authentication. Only has an effect when credential is of type TokenCredential. The value could be
- https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START create_container_client_from_service]
- :end-before: [END create_container_client_from_service]
- :language: python
- :dedent: 8
- :caption: Get a ContainerClient from an existing BlobServiceClient.
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START create_container_client_sasurl]
- :end-before: [END create_container_client_sasurl]
- :language: python
- :dedent: 8
- :caption: Creating the container client directly.
- """
- def __init__(
- self, account_url: str,
- container_name: str,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
- ) -> None:
- parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name)
-
- self.container_name = container_name
- # This parameter is used for the hierarchy traversal. Give precedence to credential.
- self._raw_credential = credential if credential else sas_token
- self._query_str, credential = self._format_query_string(sas_token, credential)
- super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
- self._api_version = get_api_version(kwargs)
- self._client = self._build_generated_client()
- self._configure_encryption(kwargs)
-
- def _build_generated_client(self) -> AzureBlobStorage:
- client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
- client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access
- return client
-
- def _format_url(self, hostname):
- return _format_url(
- container_name=self.container_name,
- hostname=hostname,
- scheme=self.scheme,
- query_str=self._query_str
- )
-
- @classmethod
- def from_container_url(
- cls, container_url: str,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
- ) -> Self:
- """Create ContainerClient from a container url.
-
- :param str container_url:
- The full endpoint URL to the Container, including SAS token if used. This could be
- either the primary endpoint, or the secondary endpoint depending on the current `location_mode`.
- :type container_url: str
- :param credential:
- The credentials with which to authenticate. This is optional if the
- account URL already has a SAS token, or the connection string already has shared
- access key values. The value can be a SAS token string,
- an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
- an account shared access key, or an instance of a TokenCredential class from azure.identity.
- If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
- - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
- If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
- should be the storage account key.
- :type credential:
- ~azure.core.credentials.AzureNamedKeyCredential or
- ~azure.core.credentials.AzureSasCredential or
- ~azure.core.credentials.TokenCredential or
- str or dict[str, str] or None
- :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
- authentication. Only has an effect when credential is of type TokenCredential. The value could be
- https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :returns: A container client.
- :rtype: ~azure.storage.blob.ContainerClient
- """
- try:
- if not container_url.lower().startswith('http'):
- container_url = "https://" + container_url
- except AttributeError as exc:
- raise ValueError("Container URL must be a string.") from exc
- parsed_url = urlparse(container_url)
- if not parsed_url.netloc:
- raise ValueError(f"Invalid URL: {container_url}")
-
- container_path = parsed_url.path.strip('/').split('/')
- account_path = ""
- if len(container_path) > 1:
- account_path = "/" + "/".join(container_path[:-1])
- account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}"
- container_name = unquote(container_path[-1])
- if not container_name:
- raise ValueError("Invalid URL. Please provide a URL with a valid container name")
- return cls(account_url, container_name=container_name, credential=credential, **kwargs)
-
- @classmethod
- def from_connection_string(
- cls, conn_str: str,
- container_name: str,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
- ) -> Self:
- """Create ContainerClient from a Connection String.
-
- :param str conn_str:
- A connection string to an Azure Storage account.
- :param container_name:
- The container name for the blob.
- :type container_name: str
- :param credential:
- The credentials with which to authenticate. This is optional if the
- account URL already has a SAS token, or the connection string already has shared
- access key values. The value can be a SAS token string,
- an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
- an account shared access key, or an instance of a TokenCredential class from azure.identity.
- Credentials provided here will take precedence over those in the connection string.
- If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
- should be the storage account key.
- :type credential:
- ~azure.core.credentials.AzureNamedKeyCredential or
- ~azure.core.credentials.AzureSasCredential or
- ~azure.core.credentials.TokenCredential or
- str or dict[str, str] or None
- :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
- authentication. Only has an effect when credential is of type TokenCredential. The value could be
- https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
- :returns: A container client.
- :rtype: ~azure.storage.blob.ContainerClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_authentication.py
- :start-after: [START auth_from_connection_string_container]
- :end-before: [END auth_from_connection_string_container]
- :language: python
- :dedent: 8
- :caption: Creating the ContainerClient from a connection string.
- """
- account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
- if 'secondary_hostname' not in kwargs:
- kwargs['secondary_hostname'] = secondary
- return cls(
- account_url, container_name=container_name, credential=credential, **kwargs)
-
- @distributed_trace
- def create_container(
- self, metadata: Optional[Dict[str, str]] = None,
- public_access: Optional[Union["PublicAccess", str]] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, "datetime"]]:
- """
- Creates a new container under the specified account. If the container
- with the same name already exists, the operation fails.
-
- :param metadata:
- A dict with name-value pairs to associate with the
- container as metadata. Example: {'Category':'test'}
- :type metadata: dict[str, str]
- :param ~azure.storage.blob.PublicAccess public_access:
- Possible values include: 'container', 'blob'.
- :keyword container_encryption_scope:
- Specifies the default encryption scope to set on the container and use for
- all future writes.
-
- .. versionadded:: 12.2.0
-
- :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A dictionary of response headers.
- :rtype: Dict[str, Union[str, datetime]]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START create_container]
- :end-before: [END create_container]
- :language: python
- :dedent: 12
- :caption: Creating a container to store blobs.
- """
- headers = kwargs.pop('headers', {})
- timeout = kwargs.pop('timeout', None)
- headers.update(add_metadata_headers(metadata)) # type: ignore
- container_cpk_scope_info = get_container_cpk_scope_info(kwargs)
- try:
- return self._client.container.create( # type: ignore
- timeout=timeout,
- access=public_access,
- container_cpk_scope_info=container_cpk_scope_info,
- cls=return_response_headers,
- headers=headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient":
- """Renames a container.
-
- Operation is successful only if the source container exists.
-
- :param str new_name:
- The new container name the user wants to rename to.
- :keyword lease:
- Specify this to perform only if the lease ID given
- matches the active lease ID of the source container.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: The renamed container client.
- :rtype: ~azure.storage.blob.ContainerClient
- """
- lease = kwargs.pop('lease', None)
- try:
- kwargs['source_lease_id'] = lease.id
- except AttributeError:
- kwargs['source_lease_id'] = lease
- try:
- renamed_container = ContainerClient(
- f"{self.scheme}://{self.primary_hostname}", container_name=new_name,
- credential=self.credential, api_version=self.api_version, _configuration=self._config,
- _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
- require_encryption=self.require_encryption, encryption_version=self.encryption_version,
- key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function)
- renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access
- return renamed_container
- except HttpResponseError as error:
- process_storage_error(error)
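An equivalent container-level sketch of create_container; the connection string is a placeholder, and from_connection_string is the classmethod defined earlier in this file:

    from azure.storage.blob import ContainerClient

    container = ContainerClient.from_connection_string(conn_str, container_name="mycontainer")
    container.create_container(metadata={"Category": "test"})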
-
- @distributed_trace
- def delete_container(self, **kwargs: Any) -> None:
- """
- Marks the specified container for deletion. The container and any blobs
- contained within it are later deleted during garbage collection.
-
- :keyword lease:
- If specified, delete_container only succeeds if the
- container's lease is active and matches this ID.
- Required if the container has an active lease.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :rtype: None
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START delete_container]
- :end-before: [END delete_container]
- :language: python
- :dedent: 12
- :caption: Delete a container.
- """
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- mod_conditions = get_modify_conditions(kwargs)
- timeout = kwargs.pop('timeout', None)
- try:
- self._client.container.delete(
- timeout=timeout,
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
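A conditional-delete sketch using the ETag keywords documented above; known_etag is a placeholder captured from an earlier response:

    from azure.core import MatchConditions

    container.delete_container(etag=known_etag, match_condition=MatchConditions.IfNotModified)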
-
- @distributed_trace
- def acquire_lease(
- self, lease_duration: int = -1,
- lease_id: Optional[str] = None,
- **kwargs: Any
- ) -> BlobLeaseClient:
- """
- Requests a new lease. If the container does not have an active lease,
- the Blob service creates a lease on the container and returns a new
- lease ID.
-
- :param int lease_duration:
- Specifies the duration of the lease, in seconds, or negative one
- (-1) for a lease that never expires. A non-infinite lease can be
- between 15 and 60 seconds. A lease duration cannot be changed
- using renew or change. Default is -1 (infinite lease).
- :param str lease_id:
- Proposed lease ID, in a GUID string format. The Blob service returns
- 400 (Invalid request) if the proposed lease ID is not in the correct format.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A BlobLeaseClient object, that can be run in a context manager.
- :rtype: ~azure.storage.blob.BlobLeaseClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START acquire_lease_on_container]
- :end-before: [END acquire_lease_on_container]
- :language: python
- :dedent: 8
- :caption: Acquiring a lease on the container.
- """
- lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore
- kwargs.setdefault('merge_span', True)
- timeout = kwargs.pop('timeout', None)
- lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs)
- return lease
-
- @distributed_trace
- def get_account_information(self, **kwargs: Any) -> Dict[str, str]:
- """Gets information related to the storage account.
-
- The information can also be retrieved if the user has a SAS to a container or blob.
- The keys in the returned dictionary include 'sku_name' and 'account_kind'.
-
- :returns: A dict of account information (SKU and account type).
- :rtype: dict(str, str)
- """
- try:
- return self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def get_container_properties(self, **kwargs: Any) -> ContainerProperties:
- """Returns all user-defined metadata and system properties for the specified
- container. The data returned does not include the container's list of blobs.
-
- :keyword lease:
- If specified, get_container_properties only succeeds if the
- container's lease is active and matches this ID.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: Properties for the specified container within a container object.
- :rtype: ~azure.storage.blob.ContainerProperties
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers.py
- :start-after: [START get_container_properties]
- :end-before: [END get_container_properties]
- :language: python
- :dedent: 12
- :caption: Getting properties on the container.
- """
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- timeout = kwargs.pop('timeout', None)
- try:
- response = self._client.container.get_properties(
- timeout=timeout,
- lease_access_conditions=access_conditions,
- cls=deserialize_container_properties,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- response.name = self.container_name
- return response # type: ignore
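A sketch combining the lease and properties calls above; per the docstring, a finite lease must be between 15 and 60 seconds:

    lease = container.acquire_lease(lease_duration=15)
    props = container.get_container_properties(lease=lease)
    print(props.name, props.last_modified, props.metadata)
    lease.release()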
-
- @distributed_trace
- def exists(self, **kwargs: Any) -> bool:
- """
- Returns True if a container exists and returns False otherwise.
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: True if the container exists, False otherwise.
- :rtype: bool
- """
- try:
- self._client.container.get_properties(**kwargs)
- return True
- except HttpResponseError as error:
- try:
- process_storage_error(error)
- except ResourceNotFoundError:
- return False
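A short existence-check sketch; note the check is inherently racy, so callers that create on absence should still handle ResourceExistsError:

    if not container.exists():
        container.create_container()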
- """ - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - mod_conditions = get_modify_conditions(kwargs) - timeout = kwargs.pop('timeout', None) - try: - return self._client.container.set_metadata( # type: ignore - timeout=timeout, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - headers=headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def _get_blob_service_client(self) -> "BlobServiceClient": # pylint: disable=client-method-missing-kwargs - """Get a client to interact with the container's parent service account. - - Defaults to current container's credentials. - - :returns: A BlobServiceClient. - :rtype: ~azure.storage.blob.BlobServiceClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service.py - :start-after: [START get_blob_service_client_from_container_client] - :end-before: [END get_blob_service_client_from_container_client] - :language: python - :dedent: 8 - :caption: Get blob service client from container object. - """ - from ._blob_service_client import BlobServiceClient - if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access - _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access - ) - else: - _pipeline = self._pipeline # pylint: disable = protected-access - return BlobServiceClient( - f"{self.scheme}://{self.primary_hostname}", - credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, - _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, - encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, _pipeline=_pipeline) - - @distributed_trace - def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: - """Gets the permissions for the specified container. - The permissions indicate whether container data may be accessed publicly. - - :keyword lease: - If specified, get_container_access_policy only succeeds if the - container's lease is active and matches this ID. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Access policy information in a dict. - :rtype: dict[str, Any] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START get_container_access_policy] - :end-before: [END get_container_access_policy] - :language: python - :dedent: 12 - :caption: Getting the access policy on the container. 
- """ - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) - try: - response, identifiers = self._client.container.get_access_policy( - timeout=timeout, - lease_access_conditions=access_conditions, - cls=return_headers_and_deserialized, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - return { - 'public_access': response.get('blob_public_access'), - 'signed_identifiers': identifiers or [] - } - - @distributed_trace - def set_container_access_policy( - self, signed_identifiers: Dict[str, "AccessPolicy"], - public_access: Optional[Union[str, "PublicAccess"]] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime]]: - """Sets the permissions for the specified container or stored access - policies that may be used with Shared Access Signatures. The permissions - indicate whether blobs in a container may be accessed publicly. - - :param signed_identifiers: - A dictionary of access policies to associate with the container. The - dictionary may contain up to 5 elements. An empty dictionary - will clear the access policies set on the service. - :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy] - :param ~azure.storage.blob.PublicAccess public_access: - Possible values include: 'container', 'blob'. - :keyword lease: - Required if the container has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A datetime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified date/time. - :keyword ~datetime.datetime if_unmodified_since: - A datetime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Container-updated property dict (Etag and last modified). - :rtype: dict[str, str or ~datetime.datetime] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START set_container_access_policy] - :end-before: [END set_container_access_policy] - :language: python - :dedent: 12 - :caption: Setting access policy on the container. - """ - if len(signed_identifiers) > 5: - raise ValueError( - 'Too many access policies provided. 
The server does not support setting ' - 'more than 5 access policies on a single resource.') - identifiers = [] - for key, value in signed_identifiers.items(): - if value: - value.start = serialize_iso(value.start) - value.expiry = serialize_iso(value.expiry) - identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore - signed_identifiers = identifiers # type: ignore - lease = kwargs.pop('lease', None) - mod_conditions = get_modify_conditions(kwargs) - access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) - try: - return cast(Dict[str, Union[str, datetime]], self._client.container.set_access_policy( - container_acl=signed_identifiers or None, - timeout=timeout, - access=public_access, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def list_blobs( - self, name_starts_with: Optional[str] = None, - include: Optional[Union[str, List[str]]] = None, - **kwargs: Any - ) -> ItemPaged[BlobProperties]: - """Returns a generator to list the blobs under the specified container. - The generator will lazily follow the continuation tokens returned by - the service. - - :param str name_starts_with: - Filters the results to return only blobs whose names - begin with the specified prefix. - :param include: - Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', - 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. - :type include: list[str] or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) response of BlobProperties. - :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START list_blobs_in_container] - :end-before: [END list_blobs_in_container] - :language: python - :dedent: 8 - :caption: List the blobs in the container. - """ - if kwargs.pop('prefix', None): - raise ValueError("Passing 'prefix' has no effect on filtering, " + - "please use the 'name_starts_with' parameter instead.") - - if include and not isinstance(include, list): - include = [include] - - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) - command = functools.partial( - self._client.container.list_blob_flat_segment, - include=include, - timeout=timeout, - **kwargs) - return ItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - page_iterator_class=BlobPropertiesPaged) - - @distributed_trace - def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: - """Returns a generator to list the names of blobs under the specified container. - The generator will lazily follow the continuation tokens returned by - the service. - - Note that no additional properties or metadata will be returned when using this API. - Additionally, this API does not have an option to include additional blobs such as snapshots, - versions, soft-deleted blobs, etc. 
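A sketch of the stored-access-policy flow just described, which enforces the five-policy limit client-side; the identifier name and times are hypothetical, with `container_client` as before:

```python
from datetime import datetime, timedelta, timezone
from azure.storage.blob import AccessPolicy, ContainerSasPermissions

policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True, list=True),
    start=datetime.now(timezone.utc),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
# The dict may hold up to 5 entries; an empty dict clears existing policies.
container_client.set_container_access_policy(signed_identifiers={"read-only": policy})
```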
To get any of this data, use :func:`list_blobs()`. - - :keyword str name_starts_with: - Filters the results to return only blobs whose names - begin with the specified prefix. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) response of blob names as strings. - :rtype: ~azure.core.paging.ItemPaged[str] - """ - if kwargs.pop('prefix', None): - raise ValueError("Passing 'prefix' has no effect on filtering, " + - "please use the 'name_starts_with' parameter instead.") - - name_starts_with = kwargs.pop('name_starts_with', None) - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) - - # For listing only names we need to create a one-off generated client and - # override its deserializer to prevent deserialization of the full response. - client = self._build_generated_client() - client.container._deserialize = IgnoreListBlobsDeserializer() # pylint: disable=protected-access - - command = functools.partial( - client.container.list_blob_flat_segment, - timeout=timeout, - **kwargs) - return ItemPaged( - command, - prefix=name_starts_with, - results_per_page=results_per_page, - container=self.container_name, - page_iterator_class=BlobNamesPaged) - - @distributed_trace - def walk_blobs( - self, name_starts_with: Optional[str] = None, - include: Optional[Union[List[str], str]] = None, - delimiter: str = "/", - **kwargs: Any - ) -> ItemPaged[BlobProperties]: - """Returns a generator to list the blobs under the specified container. - The generator will lazily follow the continuation tokens returned by - the service. This operation will list blobs in accordance with a hierarchy, - as delimited by the specified delimiter character. - - :param str name_starts_with: - Filters the results to return only blobs whose names - begin with the specified prefix. - :param include: - Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', - 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. - :type include: list[str] or str - :param str delimiter: - When the request includes this parameter, the operation returns a BlobPrefix - element in the response body that acts as a placeholder for all blobs whose - names begin with the same substring up to the appearance of the delimiter - character. The delimiter may be a single character or a string. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) response of BlobProperties. 
-        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties]
-        """
-        if kwargs.pop('prefix', None):
-            raise ValueError("Passing 'prefix' has no effect on filtering, " +
-                             "please use the 'name_starts_with' parameter instead.")
-
-        if include and not isinstance(include, list):
-            include = [include]
-
-        results_per_page = kwargs.pop('results_per_page', None)
-        timeout = kwargs.pop('timeout', None)
-        command = functools.partial(
-            self._client.container.list_blob_hierarchy_segment,
-            delimiter=delimiter,
-            include=include,
-            timeout=timeout,
-            **kwargs)
-        return BlobPrefix(
-            command,
-            prefix=name_starts_with,
-            results_per_page=results_per_page,
-            container=self.container_name,
-            delimiter=delimiter)
-
-    @distributed_trace
-    def find_blobs_by_tags(
-        self, filter_expression: str,
-        **kwargs: Any
-    ) -> ItemPaged[FilteredBlob]:
-        """Returns a generator to list the blobs under the specified container whose tags
-        match the given search expression.
-        The generator will lazily follow the continuation tokens returned by
-        the service.
-
-        :param str filter_expression:
-            The expression to find blobs whose tags match the specified condition.
-            eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
-        :keyword int results_per_page:
-            The max result per page when paginating.
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: An iterable (auto-paging) response of FilteredBlob.
-        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob]
-        """
-        results_per_page = kwargs.pop('results_per_page', None)
-        timeout = kwargs.pop('timeout', None)
-        command = functools.partial(
-            self._client.container.filter_blobs,
-            timeout=timeout,
-            where=filter_expression,
-            **kwargs)
-        return ItemPaged(
-            command, results_per_page=results_per_page, container=self.container_name,
-            page_iterator_class=FilteredBlobPaged)
-
-    @distributed_trace
-    def upload_blob(
-        self, name: str,
-        data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
-        blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB,
-        length: Optional[int] = None,
-        metadata: Optional[Dict[str, str]] = None,
-        **kwargs
-    ) -> BlobClient:
-        """Creates a new blob from a data source with automatic chunking.
-
-        :param str name: The blob with which to interact.
-        :param data: The blob data to upload.
-        :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
-        :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
-            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
-        :param int length:
-            Number of bytes to read from the stream. This is optional, but
-            should be supplied for optimal performance.
-        :param metadata:
-            Name-value pairs associated with the blob as metadata.
-        :type metadata: dict(str, str)
-        :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
-            If True, upload_blob will overwrite the existing data. If set to False, the
-            operation will fail with ResourceExistsError. The exception to the above is with Append
-            blob types: if set to False and the data already exists, an error will not be raised
-            and the data will be appended to the existing blob. If overwrite=True is set, the existing
-            append blob will be deleted and a new one created.
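The tag-filter syntax accepted by find_blobs_by_tags is easy to get wrong: tag names are double-quoted, values single-quoted. A small sketch with hypothetical tag names, reusing `container_client`:

```python
# Hypothetical tags; the iterator yields FilteredBlob items, not full BlobProperties.
for blob in container_client.find_blobs_by_tags("\"project\"='alpha' and \"stage\"='raw'"):
    print(blob.name)
```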
Defaults to False. - :keyword ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used, because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the container has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. This method may make multiple calls to the service and - the timeout will apply to each call individually. - :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: - A page blob tier value to set the blob to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: - A standard blob tier value to set the blob to. For this version of the library, - this is only applicable to block blobs on standard storage accounts. - :keyword int maxsize_condition: - Optional conditional header. The max length in bytes permitted for - the append blob. 
If the Append Block operation would cause the blob - to exceed that limit or if the blob size is already greater than the - value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword str encoding: - Defaults to UTF-8. - :keyword progress_hook: - A callback to track the progress of a long running upload. The signature is - function(current: int, total: Optional[int]) where current is the number of bytes transferred - so far, and total is the size of the blob or None if the size is unknown. - :paramtype progress_hook: Callable[[int, Optional[int]], None] - :returns: A BlobClient to interact with the newly uploaded blob. - :rtype: ~azure.storage.blob.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START upload_blob_to_container] - :end-before: [END upload_blob_to_container] - :language: python - :dedent: 8 - :caption: Upload blob to the container. - """ - if isinstance(name, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param name is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob = self.get_blob_client(name) - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - encoding = kwargs.pop('encoding', 'UTF-8') - blob.upload_blob( - data, - blob_type=blob_type, - length=length, - metadata=metadata, - timeout=timeout, - encoding=encoding, - **kwargs - ) - return blob - - @distributed_trace - def delete_blob( - self, blob: str, - delete_snapshots: Optional[str] = None, - **kwargs: Any - ) -> None: - """Marks the specified blob or snapshot for deletion. - - The blob is later deleted during garbage collection. - Note that in order to delete a blob, you must delete all of its - snapshots. You can delete both at the same time with the delete_blob - operation. - - If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot - and retains the blob or snapshot for specified number of days. - After specified number of days, blob's data is removed from the service during garbage collection. - Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` - option. Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.BlobClient.undelete()` - - :param str blob: The blob with which to interact. - :param str delete_snapshots: - Required if the blob has associated snapshots. Values include: - - "only": Deletes only the blobs snapshots. 
- - "include": Deletes the blob along with all snapshots. - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to delete. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_client = self.get_blob_client(blob) # type: ignore - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - blob_client.delete_blob( # type: ignore - delete_snapshots=delete_snapshots, - timeout=timeout, - **kwargs) - - @overload - def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: str, - **kwargs: Any - ) -> StorageStreamDownloader[str]: - ... - - @overload - def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: None = None, - **kwargs: Any - ) -> StorageStreamDownloader[bytes]: - ... - - @distributed_trace - def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: Union[str, None] = None, - **kwargs: Any - ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: - """Downloads a blob to the StorageStreamDownloader. The readall() method must - be used to read all the content or readinto() must be used to download the blob into - a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. 
- - :param str blob: The blob with which to interact. - :param int offset: - Start of byte range to use for downloading a section of the blob. - Must be set if length is provided. - :param int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to download. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the blob has an active lease. If specified, download_blob only - succeeds if the blob's lease is active and matches this ID. Value can be a - BlobLeaseClient object or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword str encoding: - Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. - :keyword progress_hook: - A callback to track the progress of a long running download. The signature is - function(current: int, total: int) where current is the number of bytes transferred - so far, and total is the total size of the download. 
-        :paramtype progress_hook: Callable[[int, int], None]
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__. This method may make multiple calls to the service and
-            the timeout will apply to each call individually.
-        :returns: A streaming object (StorageStreamDownloader)
-        :rtype: ~azure.storage.blob.StorageStreamDownloader
-        """
-        if isinstance(blob, BlobProperties):
-            warnings.warn(
-                "The use of a 'BlobProperties' instance for param blob is deprecated. " +
-                "Please use 'BlobProperties.name' or any other str input type instead.",
-                DeprecationWarning
-            )
-        blob_client = self.get_blob_client(blob)  # type: ignore
-        kwargs.setdefault('merge_span', True)
-        return blob_client.download_blob(
-            offset=offset,
-            length=length,
-            encoding=encoding,
-            **kwargs)
-
-    @distributed_trace
-    def delete_blobs(  # pylint: disable=delete-operation-wrong-return-type
-        self, *blobs: Union[str, Dict[str, Any], BlobProperties],
-        **kwargs: Any
-    ) -> Iterator["HttpResponse"]:
-        """Marks the specified blobs or snapshots for deletion.
-
-        The blobs are later deleted during garbage collection.
-        Note that in order to delete blobs, you must delete all of their
-        snapshots. You can delete both at the same time with the delete_blobs operation.
-
-        If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or snapshots
-        and retains the blobs or snapshots for the specified number of days.
-        After the specified number of days, the blobs' data is removed from the service during garbage collection.
-        Soft-deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]`.
-        Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.BlobClient.undelete()`.
-
-        The maximum number of blobs that can be deleted in a single request is 256.
-
-        :param blobs:
-            The blobs to delete. This can be a single blob, or multiple values can
-            be supplied, where each value is either the name of the blob (str) or BlobProperties.
-
-            .. note::
-                When the blob type is dict, here's a list of keys, value rules.
-
-                blob name:
-                    key: 'name', value type: str
-                snapshot you want to delete:
-                    key: 'snapshot', value type: str
-                version id:
-                    key: 'version_id', value type: str
-                whether to delete snapshots when deleting blob:
-                    key: 'delete_snapshots', value: 'include' or 'only'
-                whether the blob has been modified or not:
-                    key: 'if_modified_since', 'if_unmodified_since', value type: datetime
-                etag:
-                    key: 'etag', value type: str
-                match the etag or not:
-                    key: 'match_condition', value type: MatchConditions
-                tags match condition:
-                    key: 'if_tags_match_condition', value type: str
-                lease:
-                    key: 'lease_id', value type: Union[str, LeaseClient]
-                timeout for subrequest:
-                    key: 'timeout', value type: int
-
-        :type blobs: Union[str, Dict[str, Any], BlobProperties]
-        :keyword str delete_snapshots:
-            Required if a blob has associated snapshots. Values include:
-                - "only": Deletes only the blob's snapshots.
-                - "include": Deletes the blob along with all snapshots.
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
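A download sketch matching the streaming behavior described above, reusing `container_client` and the hypothetical blob name from the earlier upload example:

```python
# readall() buffers the whole blob; chunks()/readinto() stream it instead.
downloader = container_client.download_blob("logs/run-1.txt", max_concurrency=2)
raw = downloader.readall()  # bytes, because no encoding was requested
text = container_client.download_blob("logs/run-1.txt", encoding="utf-8").readall()  # str
```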
- If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: An iterator of responses, one for each blob in order - :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common.py - :start-after: [START delete_multiple_blobs] - :end-before: [END delete_multiple_blobs] - :language: python - :dedent: 8 - :caption: Deleting multiple blobs. - """ - if len(blobs) == 0: - return iter([]) - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - - reqs, options = _generate_delete_blobs_options( - self._query_str, - self.container_name, - self._client, - *blobs, - **kwargs - ) - - return self._batch_send(*reqs, **options) - - @distributed_trace - def set_standard_blob_tier_blobs( - self, standard_blob_tier: Optional[Union[str, "StandardBlobTier"]], - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> Iterator["HttpResponse"]: - """This operation sets the tier on block blobs. - - A block blob's tier determines Hot/Cool/Archive storage type. - This operation does not update the blob's ETag. - - The maximum number of blobs that can be updated in a single request is 256. - - :param standard_blob_tier: - Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', - 'Archive'. The hot tier is optimized for storing data that is accessed - frequently. The cool storage tier is optimized for storing data that - is infrequently accessed and stored for at least a month. The archive - tier is optimized for storing data that is rarely accessed and stored - for at least six months with flexible latency requirements. - - .. note:: - If you want to set different tier on different blobs please set this positional parameter to None. - Then the blob tier on every BlobProperties will be taken. - - :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier - :param blobs: - The blobs with which to interact. This can be a single blob, or multiple values can - be supplied, where each value is either the name of the blob (str) or BlobProperties. - - .. note:: - When the blob type is dict, here's a list of keys, value rules. 
- - blob name: - key: 'name', value type: str - standard blob tier: - key: 'blob_tier', value type: StandardBlobTier - rehydrate priority: - key: 'rehydrate_priority', value type: RehydratePriority - lease: - key: 'lease_id', value type: Union[str, LeaseClient] - snapshot: - key: "snapshot", value type: str - version id: - key: "version_id", value type: str - tags match condition: - key: 'if_tags_match_condition', value type: str - timeout for subrequest: - key: 'timeout', value type: int - - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties - :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. - :return: An iterator of responses, one for each blob in order - :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] - """ - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - reqs, options = _generate_set_tiers_options( - self._query_str, - self.container_name, - standard_blob_tier, - self._client, - *blobs, - **kwargs) - - return self._batch_send(*reqs, **options) - - @distributed_trace - def set_premium_page_blob_tier_blobs( - self, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]], - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> Iterator["HttpResponse"]: - """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts. - - The maximum number of blobs that can be updated in a single request is 256. - - :param premium_page_blob_tier: - A page blob tier value to set the blob to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - - .. note:: - If you want to set different tier on different blobs please set this positional parameter to None. - Then the blob tier on every BlobProperties will be taken. - - :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier - :param blobs: - The blobs with which to interact. This can be a single blob, or multiple values can - be supplied, where each value is either the name of the blob (str) or BlobProperties. - - .. note:: - When the blob type is dict, here's a list of keys, value rules. - - blob name: - key: 'name', value type: str - premium blob tier: - key: 'blob_tier', value type: PremiumPageBlobTier - lease: - key: 'lease_id', value type: Union[str, LeaseClient] - timeout for subrequest: - key: 'timeout', value type: int - - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
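A batch-tiering sketch under the same assumptions as the earlier examples; the blob names and per-blob timeout are hypothetical, and a plain string tier is accepted alongside StandardBlobTier:

```python
# Tier up to 256 blobs per batch request.
responses = container_client.set_standard_blob_tier_blobs(
    "Archive",
    "logs/run-1.txt",
    {"name": "logs/run-2.txt", "timeout": 30},
    raise_on_any_failure=False,
)
for resp in responses:
    print(resp.status_code)  # one response per blob, in order
```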
- This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. - :return: An iterator of responses, one for each blob in order - :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] - """ - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - reqs, options = _generate_set_tiers_options( - self._query_str, - self.container_name, - premium_page_blob_tier, - self._client, - *blobs, - **kwargs) - - return self._batch_send(*reqs, **options) - - def get_blob_client( - self, blob: str, - snapshot: Optional[str] = None, - *, - version_id: Optional[str] = None - ) -> BlobClient: - """Get a client to interact with the specified blob. - - The blob need not already exist. - - :param str blob: - The blob with which to interact. - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`~BlobClient.create_snapshot()`. - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :returns: A BlobClient. - :rtype: ~azure.storage.blob.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START get_blob_client] - :end-before: [END get_blob_client] - :language: python - :dedent: 8 - :caption: Get the blob client. - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_name = blob.get('name') - else: - blob_name = blob - _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access - ) - return BlobClient( - self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, - version_id=version_id) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py deleted file mode 100644 index 9ee405b4c31b..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py +++ /dev/null @@ -1,266 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
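For completeness, a one-liner for the get_blob_client factory shown above; no network call is made and the blob need not exist:

```python
blob_client = container_client.get_blob_client("logs/run-1.txt")  # hypothetical name
print(blob_client.url)
```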
-# -------------------------------------------------------------------------- - -from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union -from urllib.parse import quote, urlparse - -from azure.core import MatchConditions -from azure.core.pipeline.transport import HttpRequest -from ._blob_client_helpers import _generic_delete_blob_options -from ._generated import AzureBlobStorage -from ._models import BlobProperties -from ._shared.base_client import parse_query - -if TYPE_CHECKING: - from azure.storage.blob import RehydratePriority - from urllib.parse import ParseResult - from ._generated.models import LeaseAccessConditions, ModifiedAccessConditions - from ._models import PremiumPageBlobTier, StandardBlobTier - - -def _parse_url(account_url: str, container_name: str) -> Tuple["ParseResult", Any]: - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError as exc: - raise ValueError("Container URL must be a string.") from exc - parsed_url = urlparse(account_url.rstrip('/')) - if not container_name: - raise ValueError("Please specify a container name.") - if not parsed_url.netloc: - raise ValueError(f"Invalid URL: {account_url}") - - _, sas_token = parse_query(parsed_url.query) - - return parsed_url, sas_token - -def _format_url(container_name: Union[bytes, str], hostname: str, scheme: str, query_str: str) -> str: - if isinstance(container_name, str): - container_name = container_name.encode('UTF-8') - return f"{scheme}://{hostname}/{quote(container_name)}{query_str}" - -# This code is a copy from _generated. -# Once Autorest is able to provide request preparation this code should be removed. -def _generate_delete_blobs_subrequest_options( - client: AzureBlobStorage, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - delete_snapshots: Optional[str] = None, - lease_access_conditions: Optional["LeaseAccessConditions"] = None, - modified_access_conditions: Optional["ModifiedAccessConditions"] = None, - **kwargs -) -> Tuple[Dict[str, Any], Dict[str, Any]]: - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - if_modified_since = None - if modified_access_conditions is not None: - if_modified_since = modified_access_conditions.if_modified_since - if_unmodified_since = None - if modified_access_conditions is not None: - if_unmodified_since = modified_access_conditions.if_unmodified_since - if_match = None - if modified_access_conditions is not None: - if_match = modified_access_conditions.if_match - if_none_match = None - if modified_access_conditions is not None: - if_none_match = modified_access_conditions.if_none_match - if_tags = None - if modified_access_conditions is not None: - if_tags = modified_access_conditions.if_tags - - # Construct parameters - timeout = kwargs.pop('timeout', None) - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if version_id is not None: - query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - - # Construct headers - header_parameters = {} - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = client._serialize.header( # pylint: 
disable=protected-access - "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = client._serialize.header( # pylint: disable=protected-access - "lease_id", lease_id, 'str') - if if_modified_since is not None: - header_parameters['If-Modified-Since'] = client._serialize.header( # pylint: disable=protected-access - "if_modified_since", if_modified_since, 'rfc-1123') - if if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = client._serialize.header( # pylint: disable=protected-access - "if_unmodified_since", if_unmodified_since, 'rfc-1123') - if if_match is not None: - header_parameters['If-Match'] = client._serialize.header( # pylint: disable=protected-access - "if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = client._serialize.header( # pylint: disable=protected-access - "if_none_match", if_none_match, 'str') - if if_tags is not None: - header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - -def _generate_delete_blobs_options( - query_str: str, - container_name: str, - client: AzureBlobStorage, - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any -) -> Tuple[List[HttpRequest], Dict[str, Any]]: - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - delete_snapshots = kwargs.pop('delete_snapshots', None) - if_modified_since = kwargs.pop('if_modified_since', None) - if_unmodified_since = kwargs.pop('if_unmodified_since', None) - if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) - url_prepend = kwargs.pop('url_prepend', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "", - 'path': container_name, - 'restype': 'restype=container&' - }) - - reqs = [] - for blob in blobs: - if not isinstance(blob, str): - blob_name = blob.get('name') - options = _generic_delete_blob_options( # pylint: disable=protected-access - snapshot=blob.get('snapshot'), - version_id=blob.get('version_id'), - delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), - lease=blob.get('lease_id'), - if_modified_since=if_modified_since or blob.get('if_modified_since'), - if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), - etag=blob.get('etag'), - if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), - match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') - else None, - timeout=blob.get('timeout'), - ) - else: - blob_name = blob - options = _generic_delete_blob_options( # pylint: disable=protected-access - delete_snapshots=delete_snapshots, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, - if_tags_match_condition=if_tags_match_condition - ) - - query_parameters, header_parameters = _generate_delete_blobs_subrequest_options(client, **options) - - req = HttpRequest( - "DELETE", - (f"{'/' + quote(url_prepend) if url_prepend else ''}/" - f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), - headers=header_parameters - ) - - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs - -# This code is a copy from _generated. 
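From the caller's side, the per-blob dict entries consumed by this helper map directly onto subrequest headers and query parameters. A sketch with placeholder ids, reusing `container_client`:

```python
# Each dict entry carries its own conditions into one subrequest of the batch.
container_client.delete_blobs(
    "plain-name.txt",
    {"name": "versioned.txt", "version_id": "<version-id>"},
    {"name": "leased.txt", "lease_id": "<lease-id>"},
    raise_on_any_failure=False,
)
```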
-# Once Autorest is able to provide request preparation this code should be removed. -def _generate_set_tiers_subrequest_options( - client: AzureBlobStorage, - tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - rehydrate_priority: Optional["RehydratePriority"] = None, - lease_access_conditions: Optional["LeaseAccessConditions"] = None, - **kwargs: Any -) -> Tuple[Dict[str, Any], Dict[str, Any]]: - if not tier: - raise ValueError("A blob tier must be specified") - if snapshot and version_id: - raise ValueError("Snapshot and version_id cannot be set at the same time") - if_tags = kwargs.pop('if_tags', None) - - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - - comp = "tier" - timeout = kwargs.pop('timeout', None) - # Construct parameters - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if version_id is not None: - query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - query_parameters['comp'] = client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - - # Construct headers - header_parameters = {} - header_parameters['x-ms-access-tier'] = client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = client._serialize.header( # pylint: disable=protected-access - "rehydrate_priority", rehydrate_priority, 'str') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access - if if_tags is not None: - header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - -def _generate_set_tiers_options( - query_str: str, - container_name: str, - blob_tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], - client: AzureBlobStorage, - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any -) -> Tuple[List[HttpRequest], Dict[str, Any]]: - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - rehydrate_priority = kwargs.pop('rehydrate_priority', None) - if_tags = kwargs.pop('if_tags_match_condition', None) - url_prepend = kwargs.pop('url_prepend', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "", - 'path': container_name, - 'restype': 'restype=container&' - }) - - reqs = [] - for blob in blobs: - if not isinstance(blob, str): - blob_name = blob.get('name') - tier = blob_tier or blob.get('blob_tier') - query_parameters, header_parameters = _generate_set_tiers_subrequest_options( - client=client, - tier=tier, - snapshot=blob.get('snapshot'), - version_id=blob.get('version_id'), - rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), - lease_access_conditions=blob.get('lease_id'), - if_tags=if_tags or 
blob.get('if_tags_match_condition'), - timeout=timeout or blob.get('timeout') - ) - else: - blob_name = blob - query_parameters, header_parameters = _generate_set_tiers_subrequest_options( - client, blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) - - req = HttpRequest( - "PUT", - (f"{'/' + quote(url_prepend) if url_prepend else ''}/" - f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), - headers=header_parameters - ) - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py deleted file mode 100644 index 4f18b0744b9a..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py +++ /dev/null @@ -1,234 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING -from urllib.parse import unquote -from xml.etree.ElementTree import Element - -from ._models import ( - BlobAnalyticsLogging, - BlobProperties, - BlobType, - ContainerProperties, - ContentSettings, - CopyProperties, - CorsRule, - ImmutabilityPolicy, - LeaseProperties, - Metrics, - ObjectReplicationPolicy, - ObjectReplicationRule, - RetentionPolicy, - StaticWebsite -) -from ._shared.models import get_enum_value -from ._shared.response_handlers import deserialize_metadata - -if TYPE_CHECKING: - from azure.core.pipeline import PipelineResponse - from ._generated.models import ( - BlobItemInternal, - BlobTags, - PageList, - StorageServiceProperties, - StorageServiceStats, - ) - from ._shared.models import LocationMode - -def deserialize_pipeline_response_into_cls(cls_method, response: "PipelineResponse", obj: Any, headers: Dict[str, Any]): - try: - deserialized_response = response.http_response - except AttributeError: - deserialized_response = response - return cls_method(deserialized_response, obj, headers) - - -def deserialize_blob_properties(response: "PipelineResponse", obj: Any, headers: Dict[str, Any]) -> BlobProperties: - blob_properties = BlobProperties( - metadata=deserialize_metadata(response, obj, headers), - object_replication_source_properties=deserialize_ors_policies(response.http_response.headers), - **headers - ) - if 'Content-Range' in headers: - if 'x-ms-blob-content-md5' in headers: - blob_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] - else: - blob_properties.content_settings.content_md5 = None - return blob_properties - - -def deserialize_ors_policies(policy_dictionary: Optional[Dict[str, str]]) -> Optional[List[ObjectReplicationPolicy]]: - - if policy_dictionary is None: - return None - # For source blobs (blobs that have policy ids and rule ids applied to them), - # the header will be formatted as "x-ms-or-_: {Complete, Failed}". - # The value of this header is the status of the replication. 
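A standalone illustration of the "x-ms-or-<policy>_<rule>: {Complete, Failed}" header convention described in the comment above; the header values are fabricated, and the grouping mirrors how deserialize_ors_policies collects rule results per policy id:

```python
headers = {
    "x-ms-or-policy-id": "policyA",
    "x-ms-or-policyA_rule1": "Complete",
    "x-ms-or-policyA_rule2": "Failed",
}
parsed = {}
for key, status in headers.items():
    if "or-" in key and key != "x-ms-or-policy-id":
        policy_id, rule_id = key.split("or-")[1].split("_")
        parsed.setdefault(policy_id, []).append((rule_id, status))
print(parsed)  # {'policyA': [('rule1', 'Complete'), ('rule2', 'Failed')]}
```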
- or_policy_status_headers = {key: val for key, val in policy_dictionary.items() - if 'or-' in key and key != 'x-ms-or-policy-id'} - - parsed_result: Dict[str, List[ObjectReplicationRule]] = {} - - for key, val in or_policy_status_headers.items(): - # list blobs gives or-policy_rule and get blob properties gives x-ms-or-policy_rule - policy_and_rule_ids = key.split('or-')[1].split('_') - policy_id = policy_and_rule_ids[0] - rule_id = policy_and_rule_ids[1] - - # If we are seeing this policy for the first time, create a new list to store rule_id -> result - parsed_result[policy_id] = parsed_result.get(policy_id) or [] - parsed_result[policy_id].append(ObjectReplicationRule(rule_id=rule_id, status=val)) - - result_list = [ObjectReplicationPolicy(policy_id=k, rules=v) for k, v in parsed_result.items()] - - return result_list - - -def deserialize_blob_stream( - response: "PipelineResponse", - obj: Any, - headers: Dict[str, Any] -) -> Tuple["LocationMode", Any]: - blob_properties = deserialize_blob_properties(response, obj, headers) - obj.properties = blob_properties - return response.http_response.location_mode, obj - - -def deserialize_container_properties( - response: "PipelineResponse", - obj: Any, - headers: Dict[str, Any] -) -> ContainerProperties: - metadata = deserialize_metadata(response, obj, headers) - container_properties = ContainerProperties( - metadata=metadata, - **headers - ) - return container_properties - - -def get_page_ranges_result(ranges: "PageList") -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: - page_range = [] - clear_range = [] - if ranges.page_range: - page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] - if ranges.clear_range: - clear_range = [{'start': b.start, 'end': b.end} for b in ranges.clear_range] - return page_range, clear_range - - -def service_stats_deserialize(generated: "StorageServiceStats") -> Dict[str, Any]: - status = None - last_sync_time = None - if generated.geo_replication is not None: - status = generated.geo_replication.status - last_sync_time = generated.geo_replication.last_sync_time - return { - 'geo_replication': { - 'status': status, - 'last_sync_time': last_sync_time - } - } - -def service_properties_deserialize(generated: "StorageServiceProperties") -> Dict[str, Any]: - cors_list = None - if generated.cors is not None: - cors_list = [CorsRule._from_generated(cors) for cors in generated.cors] # pylint: disable=protected-access - return { - 'analytics_logging': BlobAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access - 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access - 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access - 'cors': cors_list, - 'target_version': generated.default_service_version, # pylint: disable=protected-access - 'delete_retention_policy': RetentionPolicy._from_generated(generated.delete_retention_policy), # pylint: disable=protected-access - 'static_website': StaticWebsite._from_generated(generated.static_website), # pylint: disable=protected-access - } - - -def get_blob_properties_from_generated_code(generated: "BlobItemInternal") -> BlobProperties: - blob = BlobProperties() - if generated.name.encoded and generated.name.content is not None: - blob.name = unquote(generated.name.content) - else: - blob.name = generated.name.content #type: ignore - blob_type = get_enum_value(generated.properties.blob_type) - blob.blob_type = BlobType(blob_type) - blob.etag 
= generated.properties.etag - blob.deleted = generated.deleted - blob.snapshot = generated.snapshot - blob.is_append_blob_sealed = generated.properties.is_sealed - blob.metadata = generated.metadata.additional_properties if generated.metadata else {} # type: ignore [assignment] - blob.encrypted_metadata = generated.metadata.encrypted if generated.metadata else None - blob.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access - blob.copy = CopyProperties._from_generated(generated) # pylint: disable=protected-access - blob.last_modified = generated.properties.last_modified - blob.creation_time = generated.properties.creation_time # type: ignore [assignment] - blob.content_settings = ContentSettings._from_generated(generated) # pylint: disable=protected-access - blob.size = generated.properties.content_length # type: ignore [assignment] - blob.page_blob_sequence_number = generated.properties.blob_sequence_number - blob.server_encrypted = generated.properties.server_encrypted # type: ignore [assignment] - blob.encryption_scope = generated.properties.encryption_scope - blob.deleted_time = generated.properties.deleted_time - blob.remaining_retention_days = generated.properties.remaining_retention_days - blob.blob_tier = generated.properties.access_tier # type: ignore [assignment] - blob.rehydrate_priority = generated.properties.rehydrate_priority - blob.blob_tier_inferred = generated.properties.access_tier_inferred - blob.archive_status = generated.properties.archive_status - blob.blob_tier_change_time = generated.properties.access_tier_change_time - blob.version_id = generated.version_id - blob.is_current_version = generated.is_current_version - blob.tag_count = generated.properties.tag_count - blob.tags = parse_tags(generated.blob_tags) # pylint: disable=protected-access - blob.object_replication_source_properties = deserialize_ors_policies(generated.object_replication_metadata) - blob.last_accessed_on = generated.properties.last_accessed_on - blob.immutability_policy = ImmutabilityPolicy._from_generated(generated) # pylint: disable=protected-access - blob.has_legal_hold = generated.properties.legal_hold - blob.has_versions_only = generated.has_versions_only - return blob - -def parse_tags(generated_tags: Optional["BlobTags"]) -> Optional[Dict[str, str]]: - """Deserialize a list of BlobTag objects into a dict. - - :param Optional[BlobTags] generated_tags: - A list containing the BlobTag objects from generated code. - :returns: A dictionary of the BlobTag objects. 
- :rtype: Optional[Dict[str, str]] - """ - if generated_tags: - tag_dict = {t.key: t.value for t in generated_tags.blob_tag_set} - return tag_dict - return None - - -def load_single_xml_node(element: Element, name: str) -> Optional[Element]: - return element.find(name) - - -def load_many_xml_nodes( - element: Element, - name: str, - wrapper: Optional[str] = None -) -> List[Optional[Element]]: - found_element: Optional[Element] = element - if wrapper: - found_element = load_single_xml_node(element, wrapper) - if found_element is None: - return [] - return list(found_element.findall(name)) - - -def load_xml_string(element: Element, name: str) -> Optional[str]: - node = element.find(name) - if node is None or not node.text: - return None - return node.text - - -def load_xml_int(element: Element, name: str) -> Optional[int]: - node = element.find(name) - if node is None or not node.text: - return None - return int(node.text) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py deleted file mode 100644 index 6ba994735613..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py +++ /dev/null @@ -1,933 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -import codecs -import sys -import threading -import time -import warnings -from io import BytesIO, StringIO -from typing import ( - Any, Callable, cast, Dict, Generator, - Generic, IO, Iterator, List, Optional, - overload, Tuple, TypeVar, Union, TYPE_CHECKING -) - -from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError -from azure.core.tracing.common import with_current_context - -from ._shared.request_handlers import validate_and_format_range_headers -from ._shared.response_handlers import parse_length_from_content_range, process_storage_error -from ._deserialize import deserialize_blob_properties, get_page_ranges_result -from ._encryption import ( - adjust_blob_size_for_encryption, - decrypt_blob, - get_adjusted_download_range_and_offset, - is_encryption_v2, - parse_encryption_data -) - -if TYPE_CHECKING: - from codecs import IncrementalDecoder - from ._encryption import _EncryptionData - from ._generated import AzureBlobStorage - from ._generated.operations import BlobOperations - from ._models import BlobProperties - from ._shared.models import StorageConfiguration - - -T = TypeVar('T', bytes, str) - - -def process_range_and_offset( - start_range: int, - end_range: int, - length: Optional[int], - encryption_options: Dict[str, Any], - encryption_data: Optional["_EncryptionData"] -) -> Tuple[Tuple[int, int], Tuple[int, int]]: - start_offset, end_offset = 0, 0 - if encryption_options.get("key") is not None or encryption_options.get("resolver") is not None: - return get_adjusted_download_range_and_offset( - start_range, - end_range, - length, - encryption_data) - - return (start_range, end_range), (start_offset, end_offset) - - -def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: - if data is None: - raise ValueError("Response cannot be None.") - - content = b"".join(list(data)) - - if content and encryption.get("key") is not None or encryption.get("resolver") is not None: - try: - 
return decrypt_blob( - encryption.get("required") or False, - encryption.get("key"), - encryption.get("resolver"), - content, - start_offset, - end_offset, - data.response.headers, - ) - except Exception as error: - raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) from error - return content - - -class _ChunkDownloader(object): # pylint: disable=too-many-instance-attributes - def __init__( - self, - client: "BlobOperations", - total_size: int, - chunk_size: int, - current_progress: int, - start_range: int, - end_range: int, - validate_content: bool, - encryption_options: Dict[str, Any], - encryption_data: Optional["_EncryptionData"] = None, - stream: Any = None, - parallel: Optional[int] = None, - non_empty_ranges: Optional[List[Dict[str, Any]]] = None, - progress_hook: Optional[Callable[[int, Optional[int]], None]] = None, - **kwargs: Any - ) -> None: - self.client = client - self.non_empty_ranges = non_empty_ranges - - # Information on the download range/chunk size - self.chunk_size = chunk_size - self.total_size = total_size - self.start_index = start_range - self.end_index = end_range - - # The destination that we will write to - self.stream = stream - self.stream_lock = threading.Lock() if parallel else None - self.progress_lock = threading.Lock() if parallel else None - self.progress_hook = progress_hook - - # For a parallel download, the stream is always seekable, so we note down the current position - # in order to seek to the right place when out-of-order chunks come in - self.stream_start = stream.tell() if parallel else None - - # Download progress so far - self.progress_total = current_progress - - # Encryption - self.encryption_options = encryption_options - self.encryption_data = encryption_data - - # Parameters for each get operation - self.validate_content = validate_content - self.request_options = kwargs - - def _calculate_range(self, chunk_start: int) -> Tuple[int, int]: - if chunk_start + self.chunk_size > self.end_index: - chunk_end = self.end_index - else: - chunk_end = chunk_start + self.chunk_size - return chunk_start, chunk_end - - def get_chunk_offsets(self) -> Generator[int, None, None]: - index = self.start_index - while index < self.end_index: - yield index - index += self.chunk_size - - def process_chunk(self, chunk_start: int) -> None: - chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data, _ = self._download_chunk(chunk_start, chunk_end - 1) - length = chunk_end - chunk_start - if length > 0: - self._write_to_stream(chunk_data, chunk_start) - self._update_progress(length) - - def yield_chunk(self, chunk_start: int) -> Tuple[bytes, int]: - chunk_start, chunk_end = self._calculate_range(chunk_start) - return self._download_chunk(chunk_start, chunk_end - 1) - - def _update_progress(self, length: int) -> None: - if self.progress_lock: - with self.progress_lock: # pylint: disable=not-context-manager - self.progress_total += length - else: - self.progress_total += length - - if self.progress_hook: - self.progress_hook(self.progress_total, self.total_size) - - def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: - if self.stream_lock: - with self.stream_lock: # pylint: disable=not-context-manager - self.stream.seek(self.stream_start + (chunk_start - self.start_index)) - self.stream.write(chunk_data) - else: - self.stream.write(chunk_data) - - def _do_optimize(self, given_range_start: int, given_range_end: int) -> bool: - # If we have no page range list stored, then assume there's data 
everywhere for that page blob - # or it's a block blob or append blob - if self.non_empty_ranges is None: - return False - - for source_range in self.non_empty_ranges: - # Case 1: As the range list is sorted, if we've reached such a source_range - # we've checked all the appropriate source_range already and haven't found any overlapping. - # so the given range doesn't have any data and download optimization could be applied. - # given range: | | - # source range: | | - if given_range_end < source_range['start']: # pylint:disable=no-else-return - return True - # Case 2: the given range comes after source_range, continue checking. - # given range: | | - # source range: | | - elif source_range['end'] < given_range_start: - pass - # Case 3: source_range and given range overlap somehow, no need to optimize. - else: - return False - # Went through all src_ranges, but nothing overlapped. Optimization will be applied. - return True - - def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: - if self.encryption_options is None: - raise ValueError("Required argument is missing: encryption_options") - download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data - ) - - # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. - # Do optimize and create empty chunk locally if condition is met. - if self._do_optimize(download_range[0], download_range[1]): - content_length = download_range[1] - download_range[0] + 1 - chunk_data = b"\x00" * content_length - else: - range_header, range_validation = validate_and_format_range_headers( - download_range[0], - download_range[1], - check_content_md5=self.validate_content - ) - - retry_active = True - retry_total = 3 - while retry_active: - response: Any = None - try: - _, response = self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - ) - except HttpResponseError as error: - process_storage_error(error) - - try: - chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) - retry_active = False - except (IncompleteReadError, HttpResponseError, DecodeError) as error: - retry_total -= 1 - if retry_total <= 0: - raise HttpResponseError(error, error=error) from error - time.sleep(1) - content_length = response.content_length - - # This makes sure that if_match is set so that we can validate - # that subsequent downloads are to an unmodified blob - if self.request_options.get("modified_access_conditions"): - self.request_options["modified_access_conditions"].if_match = response.properties.etag - - return chunk_data, content_length - - -class _ChunkIterator(object): - """Iterator for chunks in blob download stream.""" - - def __init__(self, size: int, content: bytes, downloader: Optional[_ChunkDownloader], chunk_size: int) -> None: - self.size = size - self._chunk_size = chunk_size - self._current_content = content - self._iter_downloader = downloader - self._iter_chunks: Optional[Generator[int, None, None]] = None - self._complete = size == 0 - - def __len__(self) -> int: - return self.size - - def __iter__(self) -> Iterator[bytes]: - return self - - # Iterate through responses. 
- def __next__(self) -> bytes: - if self._complete: - raise StopIteration("Download complete") - if not self._iter_downloader: - # cut the data obtained from initial GET into chunks - if len(self._current_content) > self._chunk_size: - return self._get_chunk_data() - self._complete = True - return self._current_content - - if not self._iter_chunks: - self._iter_chunks = self._iter_downloader.get_chunk_offsets() - - # initial GET result still has more than _chunk_size bytes of data - if len(self._current_content) >= self._chunk_size: - return self._get_chunk_data() - - try: - next_chunk = next(self._iter_chunks) - self._current_content += self._iter_downloader.yield_chunk(next_chunk)[0] - except StopIteration as e: - self._complete = True - if self._current_content: - return self._current_content - raise e - - # the current content from the first get is still there but smaller than chunk size - # therefore we want to make sure its also included - return self._get_chunk_data() - - next = __next__ # Python 2 compatibility. - - def _get_chunk_data(self) -> bytes: - chunk_data = self._current_content[: self._chunk_size] - self._current_content = self._current_content[self._chunk_size:] - return chunk_data - - -class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes - """ - A streaming object to download from Azure Storage. - """ - - name: str - """The name of the blob being downloaded.""" - container: str - """The name of the container where the blob is.""" - properties: "BlobProperties" - """The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties.""" - size: int - """The size of the total data in the stream. This will be the byte range if specified, - otherwise the total size of the blob.""" - - def __init__( - self, - clients: "AzureBlobStorage" = None, # type: ignore [assignment] - config: "StorageConfiguration" = None, # type: ignore [assignment] - start_range: Optional[int] = None, - end_range: Optional[int] = None, - validate_content: bool = None, # type: ignore [assignment] - encryption_options: Dict[str, Any] = None, # type: ignore [assignment] - max_concurrency: int = 1, - name: str = None, # type: ignore [assignment] - container: str = None, # type: ignore [assignment] - encoding: Optional[str] = None, - download_cls: Optional[Callable] = None, - **kwargs: Any - ) -> None: - self.name = name - self.container = container - self.size = 0 - - self._clients = clients - self._config = config - self._start_range = start_range - self._end_range = end_range - self._max_concurrency = max_concurrency - self._encoding = encoding - self._validate_content = validate_content - self._encryption_options = encryption_options or {} - self._progress_hook = kwargs.pop('progress_hook', None) - self._request_options = kwargs - self._response = None - self._location_mode = None - self._current_content: Union[str, bytes] = b'' - self._file_size = 0 - self._non_empty_ranges = None - self._encryption_data: Optional["_EncryptionData"] = None - - # The content download offset, after any processing (decryption), in bytes - self._download_offset = 0 - # The raw download offset, before processing (decryption), in bytes - self._raw_download_offset = 0 - # The offset the stream has been read to in bytes or chars depending on mode - self._read_offset = 0 - # The offset into current_content that has been consumed in bytes or chars depending on mode - self._current_content_offset = 0 - - self._text_mode: 
Optional[bool] = None - self._decoder: Optional["IncrementalDecoder"] = None - # Whether the current content is the first chunk of download content or not - self._first_chunk = True - self._download_start = self._start_range or 0 - - # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ - # but needs to be changed to cls in the request options. - self._request_options['cls'] = download_cls - - if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: - self._get_encryption_data_request() - - # The service only provides transactional MD5s for chunks under 4MB. - # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first - # chunk so a transactional MD5 can be retrieved. - first_get_size = ( - self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size - ) - initial_request_start = self._download_start - if self._end_range is not None and self._end_range - initial_request_start < first_get_size: - initial_request_end = self._end_range - else: - initial_request_end = initial_request_start + first_get_size - 1 - - self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, - initial_request_end, - self._end_range, - self._encryption_options, - self._encryption_data - ) - - self._response = self._initial_request() - self.properties = cast("BlobProperties", self._response.properties) - self.properties.name = self.name - self.properties.container = self.container - - # Set the content length to the download size instead of the size of the last range - self.properties.size = self.size - self.properties.content_range = (f"bytes {self._download_start}-" - f"{self._end_range if self._end_range is not None else self._file_size - 1}/" - f"{self._file_size}") - - # Overwrite the content MD5 as it is the MD5 for the last range instead - # of the stored MD5 - # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None # type: ignore [attr-defined] - - def __len__(self): - return self.size - - def _get_encryption_data_request(self) -> None: - # Save current request cls - download_cls = self._request_options.pop('cls', None) - # Adjust cls for get_properties - self._request_options['cls'] = deserialize_blob_properties - - properties = cast("BlobProperties", self._clients.blob.get_properties(**self._request_options)) - # This will return None if there is no encryption metadata or there are parsing errors. - # That is acceptable here, the proper error will be caught and surfaced when attempting - # to decrypt the blob. 
- self._encryption_data = parse_encryption_data(properties.metadata) - - # Restore cls for download - self._request_options['cls'] = download_cls - - @property - def _download_complete(self): - if is_encryption_v2(self._encryption_data): - return self._download_offset >= self.size - return self._raw_download_offset >= self.size - - def _initial_request(self): - range_header, range_validation = validate_and_format_range_headers( - self._initial_range[0], - self._initial_range[1], - start_range_required=False, - end_range_required=False, - check_content_md5=self._validate_content - ) - - retry_active = True - retry_total = 3 - while retry_active: - try: - location_mode, response = cast(Tuple[Optional[str], Any], self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options - )) - - # Check the location we read from to ensure we use the same one - # for subsequent requests. - self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._file_size is None: - raise ValueError("Required Content-Range response header is missing or malformed.") - # Remove any extra encryption data size from blob size - self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) - - if self._end_range is not None and self._start_range is not None: - # Use the end range index unless it is over the end of the file - self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size - - except HttpResponseError as error: - if self._start_range is None and error.response and error.response.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options - ) - except HttpResponseError as e: - process_storage_error(e) - - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) - - try: - if self.size == 0: - self._current_content = b"" - else: - self._current_content = process_content( - response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) - retry_active = False - except (IncompleteReadError, HttpResponseError, DecodeError) as error: - retry_total -= 1 - if retry_total <= 0: - raise HttpResponseError(error, error=error) from error - time.sleep(1) - self._download_offset += len(self._current_content) - self._raw_download_offset += response.content_length - - # get page ranges to optimize downloading sparse page blob - if response.properties.blob_type == 'PageBlob': - try: - page_ranges = self._clients.page_blob.get_page_ranges() - self._non_empty_ranges = get_page_ranges_result(page_ranges)[0] - # according to the REST API documentation: - # in a highly fragmented page blob with a large number of writes, - # a Get Page Ranges request can fail due to an internal server timeout. 
- # thus, if the page blob is not sparse, it's ok for it to fail - except HttpResponseError: - pass - - if not self._download_complete and self._request_options.get("modified_access_conditions"): - self._request_options["modified_access_conditions"].if_match = response.properties.etag - - return response - - def chunks(self) -> Iterator[bytes]: - """ - Iterate over chunks in the download stream. Note, the iterator returned will - iterate over the entire download content, regardless of any data that was - previously read. - - NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. - - :returns: An iterator of the chunks in the download stream. - :rtype: Iterator[bytes] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_hello_world.py - :start-after: [START download_a_blob_in_chunk] - :end-before: [END download_a_blob_in_chunk] - :language: python - :dedent: 12 - :caption: Download a blob using chunks(). - """ - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") - if self._encoding: - warnings.warn("Encoding is ignored with chunks as only bytes are supported.") - - iter_downloader = None - # If we still have the first chunk buffered, use it. Otherwise, download all content again - if not self._first_chunk or not self._download_complete: - if self._first_chunk: - start = self._download_start + len(self._current_content) - current_progress = len(self._current_content) - else: - start = self._download_start - current_progress = 0 - - end = self._download_start + self.size - - iter_downloader = _ChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=current_progress, - start_range=start, - end_range=end, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - **self._request_options - ) - - initial_content = self._current_content if self._first_chunk else b'' - return _ChunkIterator( - size=self.size, - content=cast(bytes, initial_content), - downloader=iter_downloader, - chunk_size=self._config.max_chunk_get_size) - - @overload - def read(self, size: int = -1) -> T: - ... - - @overload - def read(self, *, chars: Optional[int] = None) -> T: - ... - - # pylint: disable-next=too-many-statements,too-many-branches - def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: - """ - Read the specified bytes or chars from the stream. If `encoding` - was specified on `download_blob`, it is recommended to use the - chars parameter to read a specific number of chars to avoid decoding - errors. If size/chars is unspecified or negative all bytes will be read. - - :param int size: - The number of bytes to download from the stream. Leave unspecified - or set negative to download all bytes. - :keyword Optional[int] chars: - The number of chars to download from the stream. Leave unspecified - or set negative to download all chars. Note, this can only be used - when encoding is specified on `download_blob`. - :returns: - The requested data as bytes or a string if encoding was specified. If - the return value is empty, there is no more data to read. - :rtype: T - """ - if size > -1 and self._encoding: - warnings.warn( - "Size parameter specified with text encoding enabled. 
It is recommended to use chars " - "to read a specific number of characters instead." - ) - if size > -1 and chars is not None: - raise ValueError("Cannot specify both size and chars.") - if not self._encoding and chars is not None: - raise ValueError("Must specify encoding to read chars.") - if self._text_mode and size > -1: - raise ValueError("Stream has been partially read in text mode. Please use chars.") - if self._text_mode is False and chars is not None: - raise ValueError("Stream has been partially read in bytes mode. Please use size.") - - # Empty blob or already read to the end - if (size == 0 or chars == 0 or - (self._download_complete and self._current_content_offset >= len(self._current_content))): - return b'' if not self._encoding else '' # type: ignore [return-value] - - if not self._text_mode and chars is not None and self._encoding is not None: - self._text_mode = True - self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') - self._current_content = self._decoder.decode( - cast(bytes, self._current_content), final=self._download_complete) - elif self._text_mode is None: - self._text_mode = False - - output_stream: Union[BytesIO, StringIO] - if self._text_mode: - output_stream = StringIO() - size = chars if chars else sys.maxsize - else: - output_stream = BytesIO() - size = size if size > 0 else sys.maxsize - readall = size == sys.maxsize - count = 0 - - # Start by reading from current_content - start = self._current_content_offset - length = min(len(self._current_content) - self._current_content_offset, size - count) - read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] - - count += read - self._current_content_offset += read - self._read_offset += read - self._check_and_report_progress() - - remaining = size - count - if remaining > 0 and not self._download_complete: - # Create a downloader that can download the rest of the file - start = self._download_start + self._download_offset - end = self._download_start + self.size - - parallel = self._max_concurrency > 1 - downloader = _ChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=self._read_offset, - start_range=start, - end_range=end, - stream=output_stream, - parallel=parallel, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - progress_hook=self._progress_hook, - **self._request_options - ) - self._first_chunk = False - - # When reading all data, have the downloader read everything into the stream. - # Else, read one chunk at a time (using the downloader as an iterator) until - # the requested size is reached.
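The branch that follows applies a common pattern: either drain every chunk into the stream (readall), or pull chunks one at a time until the requested size is met. A minimal standalone sketch of the size-bounded variant, with hypothetical names:

```python
# Sketch of the "read until size is reached" pattern used below:
# drain a chunk iterator, stopping once enough bytes were written.
from io import BytesIO
from typing import Iterator

def read_up_to(chunks: Iterator[bytes], size: int) -> bytes:
    output = BytesIO()
    remaining = size
    while remaining > 0 and (chunk := next(chunks, None)) is not None:
        output.write(chunk[:remaining])          # never write more than requested
        remaining -= min(len(chunk), remaining)  # track how much is still wanted
    return output.getvalue()

print(read_up_to(iter([b"abc", b"def", b"ghi"]), 5))  # b'abcde'
```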
- chunks_iter = downloader.get_chunk_offsets() - if readall and not self._text_mode: - # Only do parallel if there is more than one chunk left to download - if parallel and (self.size - self._download_offset) > self._config.max_chunk_get_size: - import concurrent.futures - with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: - list(executor.map( - with_current_context(downloader.process_chunk), - downloader.get_chunk_offsets() - )) - else: - for next_chunk in chunks_iter: - downloader.process_chunk(next_chunk) - - self._complete_read() - - else: - while (chunk := next(chunks_iter, None)) is not None and remaining > 0: - chunk_data, content_length = downloader.yield_chunk(chunk) - self._download_offset += len(chunk_data) - self._raw_download_offset += content_length - if self._text_mode and self._decoder is not None: - self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) - else: - self._current_content = chunk_data - - if remaining < len(self._current_content): - read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] - else: - read = output_stream.write(self._current_content) # type: ignore [arg-type] - - self._current_content_offset = read - self._read_offset += read - remaining -= read - self._check_and_report_progress() - - data = output_stream.getvalue() - if not self._text_mode and self._encoding: - try: - # This is technically incorrect to do, but we have it for backwards compatibility. - data = cast(bytes, data).decode(self._encoding) - except UnicodeDecodeError: - warnings.warn( - "Encountered a decoding error while decoding blob data from a partial read. " - "Try using the `chars` keyword instead to read in text mode." - ) - raise - - return data # type: ignore [return-value] - - def readall(self) -> T: - """ - Read the entire contents of this blob. - This operation is blocking until all data is downloaded. - - :returns: The requested data as bytes or a string if encoding was specified. - :rtype: T - """ - return self.read() - - def readinto(self, stream: IO[bytes]) -> int: - """Download the contents of this file to a stream. - - :param IO[bytes] stream: - The stream to download to. This can be an open file-handle, - or any writable stream. The stream must be seekable if the download - uses more than one parallel connection. - :returns: The number of bytes read. - :rtype: int - """ - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. readinto is not supported in text mode.") - if self._encoding: - warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") - - # The stream must be seekable if parallel download is required - parallel = self._max_concurrency > 1 - if parallel: - error_message = "Target stream handle must be seekable." 
- if sys.version_info >= (3,) and not stream.seekable(): - raise ValueError(error_message) - - try: - stream.seek(stream.tell()) - except (NotImplementedError, AttributeError) as exc: - raise ValueError(error_message) from exc - - # If some data has been streamed using `read`, only stream the remaining data - remaining_size = self.size - self._read_offset - # Already read to the end - if remaining_size <= 0: - return 0 - - # Write the current content to the user stream - current_remaining = len(self._current_content) - self._current_content_offset - start = self._current_content_offset - count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) - - self._current_content_offset += count - self._read_offset += count - if self._progress_hook: - self._progress_hook(self._read_offset, self.size) - - # If all the data was already downloaded/buffered - if self._download_complete: - return remaining_size - - data_start = self._download_start + self._read_offset - data_end = self._download_start + self.size - - downloader = _ChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=self._read_offset, - start_range=data_start, - end_range=data_end, - stream=stream, - parallel=parallel, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - progress_hook=self._progress_hook, - **self._request_options - ) - if parallel: - import concurrent.futures - with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: - list(executor.map( - with_current_context(downloader.process_chunk), - downloader.get_chunk_offsets() - )) - else: - for chunk in downloader.get_chunk_offsets(): - downloader.process_chunk(chunk) - - self._complete_read() - return remaining_size - - def _complete_read(self): - """Adjusts all offsets to the end of the download.""" - self._download_offset = self.size - self._raw_download_offset = self.size - self._read_offset = self.size - self._current_content_offset = len(self._current_content) - - def _check_and_report_progress(self): - """Reports progress if necessary.""" - # Only report progress at the end of each chunk and use download_offset to always report - # progress in terms of (approximate) byte count. - if self._progress_hook and self._current_content_offset == len(self._current_content): - self._progress_hook(self._download_offset, self.size) - - def content_as_bytes(self, max_concurrency=1): - """DEPRECATED: Download the contents of this file. - - This operation is blocking until all data is downloaded. - - This method is deprecated, use func:`readall` instead. - - :param int max_concurrency: - The number of parallel connections with which to download. - :returns: The contents of the file as bytes. - :rtype: bytes - """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "content_as_bytes is not supported in text mode.") - - self._max_concurrency = max_concurrency - return self.readall() - - def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """DEPRECATED: Download the contents of this blob, and decode as text. - - This operation is blocking until all data is downloaded. - - This method is deprecated, use func:`readall` instead. 
- - :param int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Text encoding to decode the downloaded bytes. Default is UTF-8. - :returns: The content of the file as a str. - :rtype: str - """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "content_as_text is not supported in text mode.") - - self._max_concurrency = max_concurrency - self._encoding = encoding - return self.readall() - - def download_to_stream(self, stream, max_concurrency=1): - """DEPRECATED: Download the contents of this blob to a stream. - - This method is deprecated, use func:`readinto` instead. - - :param IO[T] stream: - The stream to download to. This can be an open file-handle, - or any writable stream. The stream must be seekable if the download - uses more than one parallel connection. - :param int max_concurrency: - The number of parallel connections with which to download. - :returns: The properties of the downloaded blob. - :rtype: Any - """ - warnings.warn( - "download_to_stream is deprecated, use readinto instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "download_to_stream is not supported in text mode.") - - self._max_concurrency = max_concurrency - self.readinto(stream) - return self.properties diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py deleted file mode 100644 index b0f217bfe43b..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py +++ /dev/null @@ -1,1122 +0,0 @@ -# pylint: disable=too-many-lines -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import math -import os -import sys -import warnings -from collections import OrderedDict -from io import BytesIO -from json import ( - dumps, - loads, -) -from typing import Any, Callable, Dict, IO, Optional, Tuple, TYPE_CHECKING -from typing import OrderedDict as TypedOrderedDict -from typing_extensions import Protocol - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.aead import AESGCM -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import CBC -from cryptography.hazmat.primitives.padding import PKCS7 - -from azure.core.exceptions import HttpResponseError -from azure.core.utils import CaseInsensitiveDict - -from ._version import VERSION -from ._shared import decode_base64_to_bytes, encode_base64 - -if TYPE_CHECKING: - from azure.core.pipeline import PipelineResponse - from cryptography.hazmat.primitives.ciphers import AEADEncryptionContext - from cryptography.hazmat.primitives.padding import PaddingContext - - -_ENCRYPTION_PROTOCOL_V1 = '1.0' -_ENCRYPTION_PROTOCOL_V2 = '2.0' -_GCM_REGION_DATA_LENGTH = 4 * 1024 * 1024 -_GCM_NONCE_LENGTH = 12 -_GCM_TAG_LENGTH = 16 - -_ERROR_OBJECT_INVALID = \ - '{0} does not define a complete interface. Value of {1} is either missing or invalid.'
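The KeyEncryptionKey protocol that follows is structural: any object exposing these four methods is accepted by the validators below. A deliberately insecure toy sketch of that shape (illustration only, not a real wrapping scheme):

```python
# Toy key-encryption-key satisfying the KeyEncryptionKey protocol below.
# NOT secure: "wrapping" is a no-op; shown only to illustrate the interface.
class NoOpKeyEncryptionKey:
    def wrap_key(self, key: bytes) -> bytes:
        return key  # a real KEK would encrypt the CEK here

    def unwrap_key(self, key: bytes, algorithm: str) -> bytes:
        return key  # and decrypt it here, validating `algorithm`

    def get_kid(self) -> str:
        return "local-test-key"  # id stored in the blob's encryption metadata

    def get_key_wrap_algorithm(self) -> str:
        return "noop"  # hypothetical algorithm name
```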
- -_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( - 'The require_encryption flag is set, but encryption is not supported' - ' for this method.') - - -class KeyEncryptionKey(Protocol): - - def wrap_key(self, key: bytes) -> bytes: - ... - - def unwrap_key(self, key: bytes, algorithm: str) -> bytes: - ... - - def get_kid(self) -> str: - ... - - def get_key_wrap_algorithm(self) -> str: - ... - - -def _validate_not_none(param_name: str, param: Any): - if param is None: - raise ValueError(f'{param_name} should not be None.') - - -def _validate_key_encryption_key_wrap(kek: KeyEncryptionKey): - # Note that None is not callable and so will fail the second clause of each check. - if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) - if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) - if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) - - -class StorageEncryptionMixin(object): - def _configure_encryption(self, kwargs: Dict[str, Any]): - self.require_encryption = kwargs.get("require_encryption", False) - self.encryption_version = kwargs.get("encryption_version", "1.0") - self.key_encryption_key = kwargs.get("key_encryption_key") - self.key_resolver_function = kwargs.get("key_resolver_function") - if self.key_encryption_key and self.encryption_version == '1.0': - warnings.warn("This client has been configured to use encryption with version 1.0. " + - "Version 1.0 is deprecated and no longer considered secure. It is highly " + - "recommended that you switch to using version 2.0. The version can be " + - "specified using the 'encryption_version' keyword.") - - -class _EncryptionAlgorithm(object): - """ - Specifies which client encryption algorithm is used. - """ - AES_CBC_256 = 'AES_CBC_256' - AES_GCM_256 = 'AES_GCM_256' - - -class _WrappedContentKey: - """ - Represents the envelope key details stored on the service. - """ - - def __init__(self, algorithm: str, encrypted_key: bytes, key_id: str) -> None: - """ - :param str algorithm: - The algorithm used for wrapping. - :param bytes encrypted_key: - The encrypted content-encryption-key. - :param str key_id: - The key-encryption-key identifier string. - """ - _validate_not_none('algorithm', algorithm) - _validate_not_none('encrypted_key', encrypted_key) - _validate_not_none('key_id', key_id) - - self.algorithm = algorithm - self.encrypted_key = encrypted_key - self.key_id = key_id - - -class _EncryptedRegionInfo: - """ - Represents the length of encryption elements. - This is only used for Encryption V2. - """ - - def __init__(self, data_length: int, nonce_length: int, tag_length: int) -> None: - """ - :param int data_length: - The length of the encryption region data (not including nonce + tag). - :param int nonce_length: - The length of nonce used when encrypting. - :param int tag_length: - The length of the encryption tag. - """ - _validate_not_none('data_length', data_length) - _validate_not_none('nonce_length', nonce_length) - _validate_not_none('tag_length', tag_length) - - self.data_length = data_length - self.nonce_length = nonce_length - self.tag_length = tag_length - - -class _EncryptionAgent: - """ - Represents the encryption agent stored on the service. 
- It consists of the encryption protocol version and encryption algorithm used. - """ - - def __init__(self, encryption_algorithm: _EncryptionAlgorithm, protocol: str) -> None: - """ - :param _EncryptionAlgorithm encryption_algorithm: - The algorithm used for encrypting the message contents. - :param str protocol: - The protocol version used for encryption. - """ - _validate_not_none('encryption_algorithm', encryption_algorithm) - _validate_not_none('protocol', protocol) - - self.encryption_algorithm = str(encryption_algorithm) - self.protocol = protocol - - -class _EncryptionData: - """ - Represents the encryption data that is stored on the service. - """ - - def __init__( - self, content_encryption_IV: Optional[bytes], - encrypted_region_info: Optional[_EncryptedRegionInfo], - encryption_agent: _EncryptionAgent, - wrapped_content_key: _WrappedContentKey, - key_wrapping_metadata: Dict[str, Any] - ) -> None: - """ - :param Optional[bytes] content_encryption_IV: - The content encryption initialization vector. - Required for AES-CBC (V1). - :param Optional[_EncryptedRegionInfo] encrypted_region_info: - The info about the authenticated block sizes. - Required for AES-GCM (V2). - :param _EncryptionAgent encryption_agent: - The encryption agent. - :param _WrappedContentKey wrapped_content_key: - An object that stores the wrapping algorithm, the key identifier, - and the encrypted key bytes. - :param Dict[str, Any] key_wrapping_metadata: - A dict containing metadata related to the key wrapping. - """ - _validate_not_none('encryption_agent', encryption_agent) - _validate_not_none('wrapped_content_key', wrapped_content_key) - - # Validate we have the right matching optional parameter for the specified algorithm - if encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_CBC_256: - _validate_not_none('content_encryption_IV', content_encryption_IV) - elif encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_GCM_256: - _validate_not_none('encrypted_region_info', encrypted_region_info) - else: - raise ValueError("Invalid encryption algorithm.") - - self.content_encryption_IV = content_encryption_IV - self.encrypted_region_info = encrypted_region_info - self.encryption_agent = encryption_agent - self.wrapped_content_key = wrapped_content_key - self.key_wrapping_metadata = key_wrapping_metadata - - -class GCMBlobEncryptionStream: - """ - A stream that performs AES-GCM encryption on the given data as - it's streamed. Data is read and encrypted in regions. The stream - will use the same encryption key and will generate a guaranteed unique - nonce for each encryption region. - """ - def __init__( - self, content_encryption_key: bytes, - data_stream: IO[bytes], - ) -> None: - """ - :param bytes content_encryption_key: The encryption key to use. - :param IO[bytes] data_stream: The data stream to read data from. - """ - self.content_encryption_key = content_encryption_key - self.data_stream = data_stream - - self.offset = 0 - self.current = b'' - self.nonce_counter = 0 - - def read(self, size: int = -1) -> bytes: - """ - Read data from the stream. Specify -1 to read all available data. - - :param int size: The amount of data to read. Defaults to -1 for all data. - :return: The bytes read.
- :rtype: bytes - """ - result = BytesIO() - remaining = sys.maxsize if size == -1 else size - - while remaining > 0: - # Start by reading from current - if len(self.current) > 0: - read = min(remaining, len(self.current)) - result.write(self.current[:read]) - - self.current = self.current[read:] - self.offset += read - remaining -= read - - if remaining > 0: - # Read one region of data and encrypt it - data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) - if len(data) == 0: - # No more data to read - break - - self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) - # IMPORTANT: Must increment the nonce each time. - self.nonce_counter += 1 - - return result.getvalue() - - -def encrypt_data_v2(data: bytes, nonce: int, key: bytes) -> bytes: - """ - Encrypts the given data using the given nonce and key using AES-GCM. - The result includes the data in the form: nonce + ciphertext + tag. - - :param bytes data: The raw data to encrypt. - :param int nonce: The nonce to use for encryption. - :param bytes key: The encryption key to use for encryption. - :return: The encrypted bytes in the form: nonce + ciphertext + tag. - :rtype: bytes - """ - nonce_bytes = nonce.to_bytes(_GCM_NONCE_LENGTH, 'big') - aesgcm = AESGCM(key) - - # Returns ciphertext + tag - ciphertext_with_tag = aesgcm.encrypt(nonce_bytes, data, None) - return nonce_bytes + ciphertext_with_tag - - -def is_encryption_v2(encryption_data: Optional[_EncryptionData]) -> bool: - """ - Determine whether the given encryption data signifies version 2.0. - - :param Optional[_EncryptionData] encryption_data: The encryption data. Will return False if this is None. - :return: True, if the encryption data indicates encryption V2, false otherwise. - :rtype: bool - """ - # If encryption_data is None, assume no encryption - return bool(encryption_data and (encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V2)) - - -def modify_user_agent_for_encryption( - user_agent: str, - moniker: str, - encryption_version: str, - request_options: Dict[str, Any] - ) -> None: - """ - Modifies the request options to contain a user agent string updated with encryption information. - Adds azstorage-clientsideencryption/ immediately preceding the SDK descriptor. - - :param str user_agent: The existing User Agent to modify. - :param str moniker: The specific SDK moniker. The modification will immediately precede azsdk-python-{moniker}. - :param str encryption_version: The version of encryption being used. - :param Dict[str, Any] request_options: The request options to add the user agent override to.
- """ - # If the user has specified user_agent_overwrite=True, don't make any modifications - if request_options.get('user_agent_overwrite'): - return - - # If the feature flag is already present, don't add it again - feature_flag = f"azstorage-clientsideencryption/{encryption_version}" - if feature_flag in user_agent: - return - - index = user_agent.find(f"azsdk-python-{moniker}") - user_agent = f"{user_agent[:index]}{feature_flag} {user_agent[index:]}" - # Since we are using user_agent_overwrite=True, we must prepend the user's user_agent if there is one - if request_options.get('user_agent'): - user_agent = f"{request_options.get('user_agent')} {user_agent}" - - request_options['user_agent'] = user_agent - request_options['user_agent_overwrite'] = True - - -def get_adjusted_upload_size(length: int, encryption_version: str) -> int: - """ - Get the adjusted size of the blob upload which accounts for - extra encryption data (padding OR nonce + tag). - - :param int length: The plaintext data length. - :param str encryption_version: The version of encryption being used. - :return: The new upload size to use. - :rtype: int - """ - if encryption_version == _ENCRYPTION_PROTOCOL_V1: - return length + (16 - (length % 16)) - - if encryption_version == _ENCRYPTION_PROTOCOL_V2: - encryption_data_length = _GCM_NONCE_LENGTH + _GCM_TAG_LENGTH - regions = math.ceil(length / _GCM_REGION_DATA_LENGTH) - return length + (regions * encryption_data_length) - - raise ValueError("Invalid encryption version specified.") - - -def get_adjusted_download_range_and_offset( - start: int, - end: int, - length: Optional[int], - encryption_data: Optional[_EncryptionData]) -> Tuple[Tuple[int, int], Tuple[int, int]]: - """ - Gets the new download range and offsets into the decrypted data for - the given user-specified range. The new download range will include all - the data needed to decrypt the user-provided range and will include only - full encryption regions. - - The offsets returned will be the offsets needed to fetch the user-requested - data out of the full decrypted data. The end offset is different based on the - encryption version. For V1, the end offset is offset from the end whereas for - V2, the end offset is the ending index into the stream. - V1: decrypted_data[start_offset : len(decrypted_data) - end_offset] - V2: decrypted_data[start_offset : end_offset] - - :param int start: The user-requested start index. - :param int end: The user-requested end index. - :param Optional[int] length: The user-requested length. Only used for V1. - :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes. - :return: (new start, new end), (start offset, end offset) - :rtype: Tuple[Tuple[int, int], Tuple[int, int]] - """ - start_offset, end_offset = 0, 0 - if encryption_data is None: - return (start, end), (start_offset, end_offset) - - if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1: - if start is not None: - # Align the start of the range along a 16 byte block - start_offset = start % 16 - start -= start_offset - - # Include an extra 16 bytes for the IV if necessary - # Because of the previous offsetting, start_range will always - # be a multiple of 16. 
- if start > 0: - start_offset += 16 - start -= 16 - - if length is not None: - # Align the end of the range along a 16 byte block - end_offset = 15 - (end % 16) - end += end_offset - - elif encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V2: - start_offset, end_offset = 0, end - - if encryption_data.encrypted_region_info is None: - raise ValueError("Missing required metadata for Encryption V2") - - nonce_length = encryption_data.encrypted_region_info.nonce_length - data_length = encryption_data.encrypted_region_info.data_length - tag_length = encryption_data.encrypted_region_info.tag_length - region_length = nonce_length + data_length + tag_length - requested_length = end - start - - if start is not None: - # Find which data region the start is in - region_num = start // data_length - # The start of the data region is different from the start of the encryption region - data_start = region_num * data_length - region_start = region_num * region_length - # Offset is based on data region - start_offset = start - data_start - # New start is the start of the encryption region - start = region_start - - if end is not None: - # Find which data region the end is in - region_num = end // data_length - end_offset = start_offset + requested_length + 1 - # New end is the end of the encryption region - end = (region_num * region_length) + region_length - 1 - - return (start, end), (start_offset, end_offset) - - -def parse_encryption_data(metadata: Dict[str, Any]) -> Optional[_EncryptionData]: - """ - Parses the encryption data out of the given blob metadata. If metadata does - not exist or there are parsing errors, this function will just return None. - - :param Dict[str, Any] metadata: The blob metadata parsed from the response. - :return: The encryption data or None - :rtype: Optional[_EncryptionData] - """ - try: - # Use case insensitive dict as key needs to be case-insensitive - case_insensitive_metadata = CaseInsensitiveDict(metadata) - return _dict_to_encryption_data(loads(case_insensitive_metadata['encryptiondata'])) - except: # pylint: disable=bare-except - return None - - -def adjust_blob_size_for_encryption(size: int, encryption_data: Optional[_EncryptionData]) -> int: - """ - Adjusts the given blob size for encryption by subtracting the size of - the encryption data (nonce + tag). This only has an effect for encryption V2. - - :param int size: The original blob size. - :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes. - :return: The new blob size. - :rtype: int - """ - if (encryption_data is not None and - encryption_data.encrypted_region_info is not None and - is_encryption_v2(encryption_data)): - - nonce_length = encryption_data.encrypted_region_info.nonce_length - data_length = encryption_data.encrypted_region_info.data_length - tag_length = encryption_data.encrypted_region_info.tag_length - region_length = nonce_length + data_length + tag_length - - num_regions = math.ceil(size / region_length) - metadata_size = num_regions * (nonce_length + tag_length) - return size - metadata_size - - return size - - -def _generate_encryption_data_dict( - kek: KeyEncryptionKey, - cek: bytes, - iv: Optional[bytes], - version: str - ) -> TypedOrderedDict[str, Any]: - """ - Generates and returns the encryption metadata as a dict. - - :param KeyEncryptionKey kek: The key encryption key. See calling functions for more information. - :param bytes cek: The content encryption key. - :param Optional[bytes] iv: The initialization vector.
-def _generate_encryption_data_dict(
-    kek: KeyEncryptionKey,
-    cek: bytes,
-    iv: Optional[bytes],
-    version: str
-) -> TypedOrderedDict[str, Any]:
-    """
-    Generates and returns the encryption metadata as a dict.
-
-    :param KeyEncryptionKey kek: The key encryption key. See calling functions for more information.
-    :param bytes cek: The content encryption key.
-    :param Optional[bytes] iv: The initialization vector. Only required for AES-CBC.
-    :param str version: The client encryption version used.
-    :return: A dict containing all the encryption metadata.
-    :rtype: Dict[str, Any]
-    """
-    # Encrypt the cek.
-    if version == _ENCRYPTION_PROTOCOL_V1:
-        wrapped_cek = kek.wrap_key(cek)
-    # For V2, we include the encryption version in the wrapped key.
-    elif version == _ENCRYPTION_PROTOCOL_V2:
-        # We must pad the version to 8 bytes for AES Keywrap algorithms
-        to_wrap = _ENCRYPTION_PROTOCOL_V2.encode().ljust(8, b'\0') + cek
-        wrapped_cek = kek.wrap_key(to_wrap)
-
-    # Build the encryption_data dict.
-    # Use OrderedDict to comply with Java's ordering requirement.
-    wrapped_content_key = OrderedDict()
-    wrapped_content_key['KeyId'] = kek.get_kid()
-    wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek)
-    wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm()
-
-    encryption_agent = OrderedDict()
-    encryption_agent['Protocol'] = version
-
-    if version == _ENCRYPTION_PROTOCOL_V1:
-        encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256
-
-    elif version == _ENCRYPTION_PROTOCOL_V2:
-        encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_GCM_256
-
-        encrypted_region_info = OrderedDict()
-        encrypted_region_info['DataLength'] = _GCM_REGION_DATA_LENGTH
-        encrypted_region_info['NonceLength'] = _GCM_NONCE_LENGTH
-
-    encryption_data_dict: TypedOrderedDict[str, Any] = OrderedDict()
-    encryption_data_dict['WrappedContentKey'] = wrapped_content_key
-    encryption_data_dict['EncryptionAgent'] = encryption_agent
-    if version == _ENCRYPTION_PROTOCOL_V1:
-        encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv)
-    elif version == _ENCRYPTION_PROTOCOL_V2:
-        encryption_data_dict['EncryptedRegionInfo'] = encrypted_region_info
-    encryption_data_dict['KeyWrappingMetadata'] = OrderedDict({'EncryptionLibrary': 'Python ' + VERSION})
-
-    return encryption_data_dict
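For reference, a hedged illustration of the approximate metadata shape this helper emits for V2. Every value below is a placeholder, not output captured from the SDK.

```python
# Illustrative only: approximate V2 'encryptiondata' structure with made-up values.
example_encryption_data = {
    "WrappedContentKey": {
        "KeyId": "my-kek-id",                        # kek.get_kid()
        "EncryptedKey": "base64(wrapped version-prefixed cek)",
        "Algorithm": "A256KW",                       # kek.get_key_wrap_algorithm()
    },
    "EncryptionAgent": {
        "Protocol": "2.0",
        "EncryptionAlgorithm": "AES_GCM_256",
    },
    "EncryptedRegionInfo": {
        "DataLength": 4 * 1024 * 1024,               # assumed region size
        "NonceLength": 12,
    },
    "KeyWrappingMetadata": {"EncryptionLibrary": "Python 12.x.x"},
    "EncryptionMode": "FullBlob",                    # added by callers such as encrypt_blob
}
```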
-def _dict_to_encryption_data(encryption_data_dict: Dict[str, Any]) -> _EncryptionData:
-    """
-    Converts the specified dictionary to an EncryptionData object for
-    eventual use in decryption.
-
-    :param dict encryption_data_dict:
-        The dictionary containing the encryption data.
-    :return: an _EncryptionData object built from the dictionary.
-    :rtype: _EncryptionData
-    """
-    try:
-        protocol = encryption_data_dict['EncryptionAgent']['Protocol']
-        if protocol not in [_ENCRYPTION_PROTOCOL_V1, _ENCRYPTION_PROTOCOL_V2]:
-            raise ValueError("Unsupported encryption version.")
-    except KeyError as exc:
-        raise ValueError("Unsupported encryption version.") from exc
-    wrapped_content_key = encryption_data_dict['WrappedContentKey']
-    wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'],
-                                             decode_base64_to_bytes(wrapped_content_key['EncryptedKey']),
-                                             wrapped_content_key['KeyId'])
-
-    encryption_agent = encryption_data_dict['EncryptionAgent']
-    encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'],
-                                        encryption_agent['Protocol'])
-
-    if 'KeyWrappingMetadata' in encryption_data_dict:
-        key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata']
-    else:
-        key_wrapping_metadata = None
-
-    # AES-CBC only
-    encryption_iv = None
-    if 'ContentEncryptionIV' in encryption_data_dict:
-        encryption_iv = decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV'])
-
-    # AES-GCM only
-    region_info = None
-    if 'EncryptedRegionInfo' in encryption_data_dict:
-        encrypted_region_info = encryption_data_dict['EncryptedRegionInfo']
-        region_info = _EncryptedRegionInfo(encrypted_region_info['DataLength'],
-                                           encrypted_region_info['NonceLength'],
-                                           _GCM_TAG_LENGTH)
-
-    encryption_data = _EncryptionData(encryption_iv,
-                                      region_info,
-                                      encryption_agent,
-                                      wrapped_content_key,
-                                      key_wrapping_metadata)
-
-    return encryption_data
-
-
-def _generate_AES_CBC_cipher(cek: bytes, iv: bytes) -> Cipher:
-    """
-    Generates and returns an encryption cipher for AES CBC using the given cek and iv.
-
-    :param bytes cek: The content encryption key for the cipher.
-    :param bytes iv: The initialization vector for the cipher.
-    :return: A cipher for encrypting in AES256 CBC.
-    :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
-    """
-
-    backend = default_backend()
-    algorithm = AES(cek)
-    mode = CBC(iv)
-    return Cipher(algorithm, mode, backend)
-
-
-def _validate_and_unwrap_cek(
-    encryption_data: _EncryptionData,
-    key_encryption_key: Optional[KeyEncryptionKey] = None,
-    key_resolver: Optional[Callable[[str], KeyEncryptionKey]] = None
-) -> bytes:
-    """
-    Extracts and returns the content_encryption_key stored in the encryption_data object
-    and performs necessary validation on all parameters.
-    :param _EncryptionData encryption_data:
-        The encryption metadata of the retrieved value.
-    :param Optional[KeyEncryptionKey] key_encryption_key:
-        The user-provided key-encryption-key. Must implement the following methods:
-        wrap_key(key)
-            - Wraps the specified key using an algorithm of the user's choice.
-        get_key_wrap_algorithm()
-            - Returns the algorithm used to wrap the specified symmetric key.
-        get_kid()
-            - Returns a string key id for this key-encryption-key.
-    :param Optional[Callable[[str], KeyEncryptionKey]] key_resolver:
-        A function that, given a key_id, returns a key_encryption_key. Please refer
-        to high-level service object instance variables for more details.
-    :return: The content_encryption_key stored in the encryption_data object.
-    :rtype: bytes
-    """
-
-    _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key)
-
-    # Validate we have the right info for the specified version
-    if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1:
-        _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV)
-    elif encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V2:
-        _validate_not_none('encrypted_region_info', encryption_data.encrypted_region_info)
-    else:
-        raise ValueError('Specified encryption version is not supported.')
-
-    content_encryption_key: Optional[bytes] = None
-
-    # If the resolver exists, give priority to the key it finds.
-    if key_resolver is not None:
-        key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id)
-
-    if key_encryption_key is None:
-        raise ValueError("Unable to decrypt. key_resolver and key_encryption_key cannot both be None.")
-    if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid):
-        raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
-    if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key):
-        raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key'))
-    if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid():
-        raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.')
-    # Will throw an exception if the specified algorithm is not supported.
-    content_encryption_key = key_encryption_key.unwrap_key(
-        encryption_data.wrapped_content_key.encrypted_key,
-        encryption_data.wrapped_content_key.algorithm)
-
-    # For V2, the version is included with the cek. We need to validate it
-    # and remove it from the actual cek.
-    if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V2:
-        version_2_bytes = _ENCRYPTION_PROTOCOL_V2.encode().ljust(8, b'\0')
-        cek_version_bytes = content_encryption_key[:len(version_2_bytes)]
-        if cek_version_bytes != version_2_bytes:
-            raise ValueError('The encryption metadata is not valid and may have been modified.')
-
-        # Remove version from the start of the cek.
-        content_encryption_key = content_encryption_key[len(version_2_bytes):]
-
-    _validate_not_none('content_encryption_key', content_encryption_key)
-
-    return content_encryption_key
-
-
-def _decrypt_message(
-    message: bytes,
-    encryption_data: _EncryptionData,
-    key_encryption_key: Optional[KeyEncryptionKey] = None,
-    resolver: Optional[Callable[[str], KeyEncryptionKey]] = None
-) -> bytes:
-    """
-    Decrypts the given ciphertext using the version indicated by the encryption
-    metadata: AES256 in CBC mode with 128 bit (PKCS7) padding for V1, or AES256-GCM for V2.
-    Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek).
-    Returns the original plaintext.
-
-    :param bytes message:
-        The ciphertext to be decrypted.
-    :param _EncryptionData encryption_data:
-        The metadata associated with this ciphertext.
-    :param Optional[KeyEncryptionKey] key_encryption_key:
-        The user-provided key-encryption-key. Must implement the following methods:
-        wrap_key(key)
-            - Wraps the specified key using an algorithm of the user's choice.
-        get_key_wrap_algorithm()
-            - Returns the algorithm used to wrap the specified symmetric key.
-        get_kid()
-            - Returns a string key id for this key-encryption-key.
-    :param Optional[Callable[[str], KeyEncryptionKey]] resolver:
-        The user-provided key resolver.
Uses the kid string to return a key-encryption-key - implementing the interface defined above. - :return: The decrypted plaintext. - :rtype: bytes - """ - _validate_not_none('message', message) - content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver) - - if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1: - if not encryption_data.content_encryption_IV: - raise ValueError("Missing required metadata for decryption.") - - cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV) - - # decrypt data - decryptor = cipher.decryptor() - decrypted_data = (decryptor.update(message) + decryptor.finalize()) - - # unpad data - unpadder = PKCS7(128).unpadder() - decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) - - elif encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V2: - block_info = encryption_data.encrypted_region_info - if not block_info or not block_info.nonce_length: - raise ValueError("Missing required metadata for decryption.") - - if encryption_data.encrypted_region_info is None: - raise ValueError("Missing required metadata for Encryption V2") - - nonce_length = int(encryption_data.encrypted_region_info.nonce_length) - - # First bytes are the nonce - nonce = message[:nonce_length] - ciphertext_with_tag = message[nonce_length:] - - aesgcm = AESGCM(content_encryption_key) - decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None) - - else: - raise ValueError('Specified encryption version is not supported.') - - return decrypted_data - - -def encrypt_blob(blob: bytes, key_encryption_key: KeyEncryptionKey, version: str) -> Tuple[str, bytes]: - """ - Encrypts the given blob using the given encryption protocol version. - Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). - Returns a json-formatted string containing the encryption metadata. This method should - only be used when a blob is small enough for single shot upload. Encrypting larger blobs - is done as a part of the upload_data_chunks method. - - :param bytes blob: - The blob to be encrypted. - :param KeyEncryptionKey key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - wrap_key(key) - - Wraps the specified key using an algorithm of the user's choice. - get_key_wrap_algorithm() - - Returns the algorithm used to wrap the specified symmetric key. - get_kid() - - Returns a string key id for this key-encryption-key. - :param str version: The client encryption version to use. - :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data. - :rtype: (str, bytes) - """ - - _validate_not_none('blob', blob) - _validate_not_none('key_encryption_key', key_encryption_key) - _validate_key_encryption_key_wrap(key_encryption_key) - - if version == _ENCRYPTION_PROTOCOL_V1: - # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks - content_encryption_key = os.urandom(32) - initialization_vector = os.urandom(16) - - cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) - - # PKCS7 with 16 byte blocks ensures compatibility with AES. - padder = PKCS7(128).padder() - padded_data = padder.update(blob) + padder.finalize() - - # Encrypt the data. 
-        encryptor = cipher.encryptor()
-        encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
-
-    elif version == _ENCRYPTION_PROTOCOL_V2:
-        # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce.
-        content_encryption_key = os.urandom(32)
-        initialization_vector = None
-
-        data = BytesIO(blob)
-        encryption_stream = GCMBlobEncryptionStream(content_encryption_key, data)
-
-        encrypted_data = encryption_stream.read()
-
-    else:
-        raise ValueError("Invalid encryption version specified.")
-
-    encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key,
-                                                     initialization_vector, version)
-    encryption_data['EncryptionMode'] = 'FullBlob'
-
-    return dumps(encryption_data), encrypted_data
-
-
-def generate_blob_encryption_data(
-    key_encryption_key: Optional[KeyEncryptionKey],
-    version: str
-) -> Tuple[Optional[bytes], Optional[bytes], Optional[str]]:
-    """
-    Generates the encryption_metadata for the blob.
-
-    :param Optional[KeyEncryptionKey] key_encryption_key:
-        The key-encryption-key used to wrap the cek associated with this blob.
-    :param str version: The client encryption version to use.
-    :return: A tuple containing the cek and iv for this blob as well as the
-        serialized encryption metadata for the blob.
-    :rtype: (Optional[bytes], Optional[bytes], Optional[str])
-    """
-
-    encryption_data = None
-    content_encryption_key = None
-    initialization_vector = None
-    if key_encryption_key:
-        _validate_key_encryption_key_wrap(key_encryption_key)
-        content_encryption_key = os.urandom(32)
-        # Initialization vector only needed for V1
-        if version == _ENCRYPTION_PROTOCOL_V1:
-            initialization_vector = os.urandom(16)
-        encryption_data_dict = _generate_encryption_data_dict(key_encryption_key,
-                                                              content_encryption_key,
-                                                              initialization_vector,
-                                                              version)
-        encryption_data_dict['EncryptionMode'] = 'FullBlob'
-        encryption_data = dumps(encryption_data_dict)
-
-    return content_encryption_key, initialization_vector, encryption_data
-
-
-def decrypt_blob(  # pylint: disable=too-many-locals,too-many-statements
-    require_encryption: bool,
-    key_encryption_key: Optional[KeyEncryptionKey],
-    key_resolver: Optional[Callable[[str], KeyEncryptionKey]],
-    content: bytes,
-    start_offset: int,
-    end_offset: int,
-    response_headers: Dict[str, Any]
-) -> bytes:
-    """
-    Decrypts the given blob contents and returns only the requested range.
-
-    :param bool require_encryption:
-        Whether the calling blob service requires objects to be decrypted.
-    :param Optional[KeyEncryptionKey] key_encryption_key:
-        The user-provided key-encryption-key. Must implement the following methods:
-        wrap_key(key)
-            - Wraps the specified key using an algorithm of the user's choice.
-        get_key_wrap_algorithm()
-            - Returns the algorithm used to wrap the specified symmetric key.
-        get_kid()
-            - Returns a string key id for this key-encryption-key.
-    :param key_resolver:
-        The user-provided key resolver. Uses the kid string to return a key-encryption-key
-        implementing the interface defined above.
-    :type key_resolver: Optional[Callable[[str], KeyEncryptionKey]]
-    :param bytes content:
-        The encrypted blob content.
-    :param int start_offset:
-        The adjusted offset from the beginning of the *decrypted* content for the caller's data.
-    :param int end_offset:
-        The adjusted offset from the end of the *decrypted* content for the caller's data.
-    :param Dict[str, Any] response_headers:
-        A dictionary of response headers from the download request.
Expected to include the
-        'x-ms-meta-encryptiondata' header if the blob was encrypted.
-    :return: The decrypted blob content.
-    :rtype: bytes
-    """
-    try:
-        encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata']))
-    except Exception as exc:  # pylint: disable=broad-except
-        if require_encryption:
-            raise ValueError(
-                'Encryption required, but received data does not contain appropriate metadata. ' + \
-                'Data was either not encrypted or metadata has been lost.') from exc
-
-        return content
-
-    algorithm = encryption_data.encryption_agent.encryption_algorithm
-    if algorithm not in (_EncryptionAlgorithm.AES_CBC_256, _EncryptionAlgorithm.AES_GCM_256):
-        raise ValueError('Specified encryption algorithm is not supported.')
-
-    version = encryption_data.encryption_agent.protocol
-    if version not in (_ENCRYPTION_PROTOCOL_V1, _ENCRYPTION_PROTOCOL_V2):
-        raise ValueError('Specified encryption version is not supported.')
-
-    content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver)
-
-    if version == _ENCRYPTION_PROTOCOL_V1:
-        blob_type = response_headers['x-ms-blob-type']
-
-        iv: Optional[bytes] = None
-        unpad = False
-        if 'content-range' in response_headers:
-            content_range = response_headers['content-range']
-            # Format: 'bytes x-y/size'
-
-            # Ignore the word 'bytes'
-            content_range = content_range.split(' ')
-
-            content_range = content_range[1].split('-')
-            content_range = content_range[1].split('/')
-            end_range = int(content_range[0])
-            blob_size = int(content_range[1])
-
-            if start_offset >= 16:
-                iv = content[:16]
-                content = content[16:]
-                start_offset -= 16
-            else:
-                iv = encryption_data.content_encryption_IV
-
-            if end_range == blob_size - 1:
-                unpad = True
-        else:
-            unpad = True
-            iv = encryption_data.content_encryption_IV
-
-        if blob_type == 'PageBlob':
-            unpad = False
-
-        if iv is None:
-            raise ValueError("Missing required metadata for Encryption V1")
-
-        cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
-        decryptor = cipher.decryptor()
-
-        content = decryptor.update(content) + decryptor.finalize()
-        if unpad:
-            unpadder = PKCS7(128).unpadder()
-            content = unpadder.update(content) + unpadder.finalize()
-
-        return content[start_offset: len(content) - end_offset]
-
-    if version == _ENCRYPTION_PROTOCOL_V2:
-        # We assume the content contains only full encryption regions
-        total_size = len(content)
-        offset = 0
-
-        if encryption_data.encrypted_region_info is None:
-            raise ValueError("Missing required metadata for Encryption V2")
-
-        nonce_length = encryption_data.encrypted_region_info.nonce_length
-        data_length = encryption_data.encrypted_region_info.data_length
-        tag_length = encryption_data.encrypted_region_info.tag_length
-        region_length = nonce_length + data_length + tag_length
-
-        decrypted_content = bytearray()
-        while offset < total_size:
-            # Process one encryption region at a time
-            process_size = min(region_length, total_size)
-            encrypted_region = content[offset:offset + process_size]
-
-            # First bytes are the nonce
-            nonce = encrypted_region[:nonce_length]
-            ciphertext_with_tag = encrypted_region[nonce_length:]
-
-            aesgcm = AESGCM(content_encryption_key)
-            decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None)
-            decrypted_content.extend(decrypted_data)
-
-            offset += process_size
-
-        # Read the caller-requested data from the decrypted content
-        return decrypted_content[start_offset:end_offset]
-
-    raise ValueError('Specified encryption version is not supported.')
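The V2 branch above walks the download one encryption region at a time. A minimal self-contained sketch of that per-region framing (nonce || ciphertext || tag), assuming a 12-byte nonce to match the constants used elsewhere in this module:

```python
# Sketch only: one V2-style region round trip with the `cryptography` AESGCM API.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

NONCE_LEN = 12  # assumed to match _GCM_NONCE_LENGTH

def encrypt_region(key: bytes, plaintext: bytes) -> bytes:
    nonce = os.urandom(NONCE_LEN)
    # AESGCM.encrypt returns the ciphertext with the 16-byte tag appended.
    return nonce + AESGCM(key).encrypt(nonce, plaintext, None)

def decrypt_region(key: bytes, region: bytes) -> bytes:
    nonce, ciphertext_with_tag = region[:NONCE_LEN], region[NONCE_LEN:]
    return AESGCM(key).decrypt(nonce, ciphertext_with_tag, None)

key = AESGCM.generate_key(bit_length=256)
assert decrypt_region(key, encrypt_region(key, b"example data")) == b"example data"
```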
-def get_blob_encryptor_and_padder(
-    cek: Optional[bytes],
-    iv: Optional[bytes],
-    should_pad: bool
-) -> Tuple[Optional["AEADEncryptionContext"], Optional["PaddingContext"]]:
-    """
-    Returns an AES-CBC encryptor and PKCS7 padder for the given cek and iv,
-    or (None, None) when either is missing (i.e. client-side encryption is off).
-    """
-    encryptor = None
-    padder = None
-
-    if cek is not None and iv is not None:
-        cipher = _generate_AES_CBC_cipher(cek, iv)
-        encryptor = cipher.encryptor()
-        padder = PKCS7(128).padder() if should_pad else None
-
-    return encryptor, padder
-
-
-def encrypt_queue_message(message: str, key_encryption_key: KeyEncryptionKey, version: str) -> str:
-    """
-    Encrypts the given plain text message using the given protocol version.
-    Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
-    Returns a json-formatted string containing the encrypted message and the encryption metadata.
-
-    :param str message:
-        The plain text message to be encrypted.
-    :param KeyEncryptionKey key_encryption_key:
-        The user-provided key-encryption-key. Must implement the following methods:
-        wrap_key(key)
-            - Wraps the specified key using an algorithm of the user's choice.
-        get_key_wrap_algorithm()
-            - Returns the algorithm used to wrap the specified symmetric key.
-        get_kid()
-            - Returns a string key id for this key-encryption-key.
-    :param str version: The client encryption version to use.
-    :return: A json-formatted string containing the encrypted message and the encryption metadata.
-    :rtype: str
-    """
-
-    _validate_not_none('message', message)
-    _validate_not_none('key_encryption_key', key_encryption_key)
-    _validate_key_encryption_key_wrap(key_encryption_key)
-
-    # Queue encoding functions all return unicode strings, and encryption should
-    # operate on binary strings.
-    message_as_bytes: bytes = message.encode('utf-8')
-
-    if version == _ENCRYPTION_PROTOCOL_V1:
-        # AES256 CBC uses 256 bit (32 byte) keys and always uses 16 byte blocks
-        content_encryption_key = os.urandom(32)
-        initialization_vector = os.urandom(16)
-
-        cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
-
-        # PKCS7 with 16 byte blocks ensures compatibility with AES.
-        padder = PKCS7(128).padder()
-        padded_data = padder.update(message_as_bytes) + padder.finalize()
-
-        # Encrypt the data.
-        encryptor = cipher.encryptor()
-        encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
-
-    elif version == _ENCRYPTION_PROTOCOL_V2:
-        # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce.
-        content_encryption_key = os.urandom(32)
-        initialization_vector = None
-
-        # The nonce MUST be unique for every encryption performed with the same key
-        nonce = os.urandom(12)
-        aesgcm = AESGCM(content_encryption_key)
-
-        # Returns ciphertext + tag
-        ciphertext_with_tag = aesgcm.encrypt(nonce, message_as_bytes, None)
-        encrypted_data = nonce + ciphertext_with_tag
-
-    else:
-        raise ValueError("Invalid encryption version specified.")
-
-    # Build the dictionary structure.
-    queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data),
-                     'EncryptionData': _generate_encryption_data_dict(key_encryption_key,
-                                                                      content_encryption_key,
-                                                                      initialization_vector,
-                                                                      version)}
-
-    return dumps(queue_message)
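Illustrative shape of the JSON envelope `encrypt_queue_message` returns for V2; all values are placeholders.

```python
# Illustrative only: the queue message envelope, with made-up values.
example_queue_message = {
    "EncryptedMessageContents": "base64(nonce + ciphertext + tag)",
    "EncryptionData": {
        "WrappedContentKey": {"KeyId": "...", "EncryptedKey": "base64(...)", "Algorithm": "..."},
        "EncryptionAgent": {"Protocol": "2.0", "EncryptionAlgorithm": "AES_GCM_256"},
        "EncryptedRegionInfo": {"DataLength": 4194304, "NonceLength": 12},
        "KeyWrappingMetadata": {"EncryptionLibrary": "Python 12.x.x"},
    },
}
```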
-
-
-def decrypt_queue_message(
-    message: str,
-    response: "PipelineResponse",
-    require_encryption: bool,
-    key_encryption_key: Optional[KeyEncryptionKey],
-    resolver: Optional[Callable[[str], KeyEncryptionKey]]
-) -> str:
-    """
-    Returns the decrypted message contents from an EncryptedQueueMessage.
-    If no encryption metadata is present, will return the unaltered message.
-
-    :param str message:
-        The JSON formatted QueueEncryptedMessage contents with all associated metadata.
-    :param ~azure.core.pipeline.PipelineResponse response:
-        The pipeline response used to construct any error raised during decryption.
-    :param bool require_encryption:
-        If set, will enforce that the retrieved messages are encrypted and decrypt them.
-    :param Optional[KeyEncryptionKey] key_encryption_key:
-        The user-provided key-encryption-key. Must implement the following methods:
-        wrap_key(key)
-            - Wraps the specified key using an algorithm of the user's choice.
-        get_key_wrap_algorithm()
-            - Returns the algorithm used to wrap the specified symmetric key.
-        get_kid()
-            - Returns a string key id for this key-encryption-key.
-    :param Optional[Callable[[str], KeyEncryptionKey]] resolver:
-        The user-provided key resolver. Uses the kid string to return a key-encryption-key
-        implementing the interface defined above.
-    :return: The plain text message from the queue message.
-    :rtype: str
-    """
-    response = response.http_response
-
-    try:
-        deserialized_message: Dict[str, Any] = loads(message)
-
-        encryption_data = _dict_to_encryption_data(deserialized_message['EncryptionData'])
-        decoded_data = decode_base64_to_bytes(deserialized_message['EncryptedMessageContents'])
-    except (KeyError, ValueError) as exc:
-        # Message was not json formatted and so was not encrypted
-        # or the user provided a json formatted message
-        # or the metadata was malformed.
-        if require_encryption:
-            raise ValueError(
-                'Encryption required, but received message does not contain appropriate metadata. ' + \
-                'Message was either not encrypted or metadata was incorrect.') from exc
-
-        return message
-    try:
-        return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8')
-    except Exception as error:
-        raise HttpResponseError(
-            message="Decryption failed.",
-            response=response,  # type: ignore [arg-type]
-            error=error) from error
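These helpers repeatedly document the same key-encryption-key interface (wrap_key / unwrap_key / get_key_wrap_algorithm / get_kid). A hedged toy implementation of that interface, using RFC 3394 AES Key Wrap from `cryptography`; the class name and algorithm label below are illustrative, not SDK names.

```python
# Sketch only: a locally held KEK satisfying the documented duck-typed interface.
import os
from cryptography.hazmat.primitives.keywrap import aes_key_wrap, aes_key_unwrap

class LocalKeyWrapper:
    """Toy KEK using AES Key Wrap (RFC 3394) with a locally held 256-bit key."""
    def __init__(self, kid: str):
        self.kid = kid
        self._key = os.urandom(32)

    def wrap_key(self, key: bytes) -> bytes:
        return aes_key_wrap(self._key, key)

    def unwrap_key(self, key: bytes, algorithm: str) -> bytes:
        assert algorithm == self.get_key_wrap_algorithm()
        return aes_key_unwrap(self._key, key)

    def get_key_wrap_algorithm(self) -> str:
        return "A256KW"  # illustrative label

    def get_kid(self) -> str:
        return self.kid

kek = LocalKeyWrapper("local-kek-1")
cek = os.urandom(32)
assert kek.unwrap_key(kek.wrap_key(cek), "A256KW") == cek
```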
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/__init__.py
deleted file mode 100644
index 62dc43a7722a..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/__init__.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from ._azure_blob_storage import AzureBlobStorage
-
-try:
-    from ._patch import __all__ as _patch_all
-    from ._patch import *  # pylint: disable=unused-wildcard-import
-except ImportError:
-    _patch_all = []
-from ._patch import patch_sdk as _patch_sdk
-
-__all__ = [
-    "AzureBlobStorage",
-]
-__all__.extend([p for p in _patch_all if p not in __all__])
-
-_patch_sdk()
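The deleted `__init__.py` above shows the generated package's extension hook: anything exported from a handwritten `_patch.py` is re-exported from the package, and `patch_sdk()` runs at import time. A hedged sketch of what such a `_patch.py` could contain; the subclass and its name are invented for illustration.

```python
# Sketch only: a hypothetical _patch.py picked up by the generated __init__.
from ._azure_blob_storage import AzureBlobStorage  # the generated client

class CustomizedAzureBlobStorage(AzureBlobStorage):
    """Example handwritten customization layered over the generated client."""

# Names listed here are re-exported by the generated __init__.py.
__all__ = ["CustomizedAzureBlobStorage"]

def patch_sdk():
    # Called once at package import; a hook for patching generated code in place.
    pass
```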
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
deleted file mode 100644
index 4de4871f14b6..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, Literal
-
-from azure.core.pipeline import policies
-
-VERSION = "unknown"
-
-
-class AzureBlobStorageConfiguration:  # pylint: disable=too-many-instance-attributes,name-too-long
-    """Configuration for AzureBlobStorage.
-
-    Note that all parameters used to create this instance are saved as instance
-    attributes.
-
-    :param url: The URL of the service account, container, or blob that is the target of the
-     desired operation. Required.
-    :type url: str
-    :keyword version: Specifies the version of the operation to use for this request. Default value
-     is "2024-08-04". Note that overriding this default value may result in unsupported behavior.
-    :paramtype version: str
-    """
-
-    def __init__(self, url: str, **kwargs: Any) -> None:
-        version: Literal["2024-08-04"] = kwargs.pop("version", "2024-08-04")
-
-        if url is None:
-            raise ValueError("Parameter 'url' must not be None.")
-
-        self.url = url
-        self.version = version
-        kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION))
-        self.polling_interval = kwargs.get("polling_interval", 30)
-        self._configure(**kwargs)
-
-    def _configure(self, **kwargs: Any) -> None:
-        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
-        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
-        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
-        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
-        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
-        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
-        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
-        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
-        self.authentication_policy = kwargs.get("authentication_policy")
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_patch.py
deleted file mode 100644
index 4688ca7f8ac2..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_patch.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-#
-# Copyright (c) Microsoft Corporation. All rights reserved.
-#
-# The MIT License (MIT)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the ""Software""), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-#
-# --------------------------------------------------------------------------
-
-# This file is used for handwritten extensions to the generated code.
Example: -# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md - - -def patch_sdk(): - pass diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/__init__.py deleted file mode 100644 index 62dc43a7722a..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._azure_blob_storage import AzureBlobStorage - -try: - from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "AzureBlobStorage", -] -__all__.extend([p for p in _patch_all if p not in __all__]) - -_patch_sdk() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py deleted file mode 100644 index 5960c22f03f2..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, Literal - -from azure.core.pipeline import policies - -VERSION = "unknown" - - -class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long - """Configuration for AzureBlobStorage. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param url: The URL of the service account, container, or blob that is the target of the - desired operation. Required. - :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2024-08-04". Note that overriding this default value may result in unsupported behavior. 
- :paramtype version: str - """ - - def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2024-08-04"] = kwargs.pop("version", "2024-08-04") - - if url is None: - raise ValueError("Parameter 'url' must not be None.") - - self.url = url - self.version = version - kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION)) - self.polling_interval = kwargs.get("polling_interval", 30) - self._configure(**kwargs) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_patch.py deleted file mode 100644 index 4688ca7f8ac2..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_patch.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- - -# This file is used for handwritten extensions to the generated code. 
Example: -# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md - - -def patch_sdk(): - pass diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py deleted file mode 100644 index 70b5d865e6d2..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ /dev/null @@ -1,720 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._append_blob_operations import ( - build_append_block_from_url_request, - build_append_block_request, - build_create_request, - build_seal_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class AppendBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`append_blob` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def create( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create Append Blob operation creates a new append blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_create_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - 
immutability_policy_expiry=immutability_policy_expiry,
-            immutability_policy_mode=immutability_policy_mode,
-            legal_hold=legal_hold,
-            blob_type=blob_type,
-            version=self._config.version,
-            headers=_headers,
-            params=_params,
-        )
-        _request.url = self._client.format_url(_request.url)
-
-        _stream = False
-        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
-            _request, stream=_stream, **kwargs
-        )
-
-        response = pipeline_response.http_response
-
-        if response.status_code not in [201]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
-            raise HttpResponseError(response=response, model=error)
-
-        response_headers = {}
-        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
-        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
-        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
-        response_headers["x-ms-client-request-id"] = self._deserialize(
-            "str", response.headers.get("x-ms-client-request-id")
-        )
-        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
-        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
-        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
-        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
-        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
-            "bool", response.headers.get("x-ms-request-server-encrypted")
-        )
-        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
-            "str", response.headers.get("x-ms-encryption-key-sha256")
-        )
-        response_headers["x-ms-encryption-scope"] = self._deserialize(
-            "str", response.headers.get("x-ms-encryption-scope")
-        )
-
-        if cls:
-            return cls(pipeline_response, None, response_headers)  # type: ignore
-
-    @distributed_trace_async
-    async def append_block(  # pylint: disable=inconsistent-return-statements
-        self,
-        content_length: int,
-        body: IO[bytes],
-        timeout: Optional[int] = None,
-        transactional_content_md5: Optional[bytes] = None,
-        transactional_content_crc64: Optional[bytes] = None,
-        request_id_parameter: Optional[str] = None,
-        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
-        append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
-        cpk_info: Optional[_models.CpkInfo] = None,
-        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
-        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
-        **kwargs: Any
-    ) -> None:
-        """The Append Block operation commits a new block of data to the end of an existing append blob.
-        The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to
-        AppendBlob. Append Block is supported only on version 2015-02-21 or later.
-
-        :param content_length: The length of the request. Required.
-        :type content_length: int
-        :param body: Initial data. Required.
-        :type body: IO[bytes]
-        :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting
-         Timeouts for Blob Service Operations.`. Default value is None.
- :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. Default value is None. - :type append_position_access_conditions: - ~azure.storage.blob.models.AppendPositionAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _max_size = None - _append_position = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position - _max_size = append_position_access_conditions.max_size - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_append_block_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - 
transactional_content_crc64=transactional_content_crc64, - lease_id=_lease_id, - max_size=_max_size, - append_position=_append_position, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-append-offset"] = self._deserialize( - "str", response.headers.get("x-ms-blob-append-offset") - ) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def append_block_from_url( # pylint: disable=inconsistent-return-statements - self, - source_url: str, - content_length: int, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = 
None,
-        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
-        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
-        **kwargs: Any
-    ) -> None:
-        """The Append Block operation commits a new block of data to the end of an existing append blob
-        where the contents are read from a source URL. The Append Block operation is permitted only if
-        the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on
-        version 2015-02-21 or later.
-
-        :param source_url: Specify a URL to the copy source. Required.
-        :type source_url: str
-        :param content_length: The length of the request. Required.
-        :type content_length: int
-        :param source_range: Bytes of source data in the specified range. Default value is None.
-        :type source_range: str
-        :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read
-         from the copy source. Default value is None.
-        :type source_content_md5: bytes
-        :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be
-         read from the copy source. Default value is None.
-        :type source_contentcrc64: bytes
-        :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting
-         Timeouts for Blob Service Operations.`. Default value is None.
-        :type timeout: int
-        :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-         by the service. Default value is None.
-        :type transactional_content_md5: bytes
-        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
-         value is None.
-        :type request_id_parameter: str
-        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
-         OAuth access token to copy source. Default value is None.
-        :type copy_source_authorization: str
-        :param cpk_info: Parameter group. Default value is None.
-        :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group. Default value is None.
-        :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param lease_access_conditions: Parameter group. Default value is None.
-        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param append_position_access_conditions: Parameter group. Default value is None.
-        :type append_position_access_conditions:
-         ~azure.storage.blob.models.AppendPositionAccessConditions
-        :param modified_access_conditions: Parameter group. Default value is None.
-        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :param source_modified_access_conditions: Parameter group. Default value is None.
- :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _max_size = None - _append_position = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position - _max_size = append_position_access_conditions.max_size - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_append_block_from_url_request( - url=self._config.url, - source_url=source_url, - content_length=content_length, - source_range=source_range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - max_size=_max_size, - append_position=_append_position, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-append-offset"] = self._deserialize( - "str", response.headers.get("x-ms-blob-append-offset") - ) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def seal( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on - version 2019-12-12 or later. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param append_position_access_conditions: Parameter group. Default value is None. 
- :type append_position_access_conditions: - ~azure.storage.blob.models.AppendPositionAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _append_position = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position - - _request = build_seal_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - append_position=_append_position, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py deleted file mode 100644 index 9cf0b47f035d..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ /dev/null @@ -1,3118 +0,0 @@ -# pylint: 
disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, AsyncIterator, Callable, Dict, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._blob_operations import ( - build_abort_copy_from_url_request, - build_acquire_lease_request, - build_break_lease_request, - build_change_lease_request, - build_copy_from_url_request, - build_create_snapshot_request, - build_delete_immutability_policy_request, - build_delete_request, - build_download_request, - build_get_account_info_request, - build_get_properties_request, - build_get_tags_request, - build_query_request, - build_release_lease_request, - build_renew_lease_request, - build_set_expiry_request, - build_set_http_headers_request, - build_set_immutability_policy_request, - build_set_legal_hold_request, - build_set_metadata_request, - build_set_tags_request, - build_set_tier_request, - build_start_copy_from_url_request, - build_undelete_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class BlobOperations: # pylint: disable=too-many-public-methods - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`blob` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def download( - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - range_get_content_md5: Optional[bool] = None, - range_get_content_crc64: Optional[bool] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> AsyncIterator[bytes]: - """The Download operation reads or downloads a blob from the system, including its metadata and - properties. You can also call Download to read a snapshot. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param range_get_content_md5: When set to true and specified together with the Range, the - service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. Default value is None. - :type range_get_content_md5: bool - :param range_get_content_crc64: When set to true and specified together with the Range, the - service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. Default value is None. - :type range_get_content_crc64: bool - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: AsyncIterator[bytes] or the result of cls(response) - :rtype: AsyncIterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_download_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - range=range, - lease_id=_lease_id, - range_get_content_md5=range_get_content_md5, - range_get_content_crc64=range_get_content_crc64, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["Content-Length"] = self._deserialize("int", 
response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", 
response.headers.get("x-ms-blob-content-md5") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", 
response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_properties( # pylint: disable=inconsistent-return-statements - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and - system properties for the blob. It does not return the content of the blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. 
- :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_properties_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise 
HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-incremental-copy"] = self._deserialize( - "bool", response.headers.get("x-ms-incremental-copy") - ) - response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( - "str", response.headers.get("x-ms-copy-destination-snapshot") - ) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["x-ms-blob-committed-block-count"] = 
self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) - response_headers["x-ms-access-tier-inferred"] = self._deserialize( - "bool", response.headers.get("x-ms-access-tier-inferred") - ) - response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) - response_headers["x-ms-access-tier-change-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-access-tier-change-time") - ) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-rehydrate-priority"] = self._deserialize( - "str", response.headers.get("x-ms-rehydrate-priority") - ) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, - request_id_parameter: Optional[str] = None, - blob_delete_type: Literal["Permanent"] = "Permanent", - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - permanently removed from the storage account. If the storage account's soft delete feature is - enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] - (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's - data is permanently removed from the storage account. 
Note that you continue to be charged for - the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and - specify the "include=deleted" query parameter to discover which blobs and snapshots have been - soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other - operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code - of 404 (ResourceNotFound). - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the - following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. Known values are: "include" and "only". - Default value is None. - :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and - None. Default value is "Permanent". - :type blob_delete_type: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - lease_id=_lease_id, - delete_snapshots=delete_snapshots, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_delete_type=blob_delete_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def undelete( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Undelete a blob that was previously soft deleted. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_undelete_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_expiry( # pylint: disable=inconsistent-return-statements - self, - expiry_options: Union[str, _models.BlobExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets the time a blob will expire and be deleted. - - :param expiry_options: Indicates the mode of the expiry time. Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param expires_on: The time to set the blob to expire. Default value is None. 
- :type expires_on: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_expiry_request( - url=self._config.url, - expiry_options=expiry_options, - timeout=timeout, - request_id_parameter=request_id_parameter, - expires_on=expires_on, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_http_headers( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set HTTP Headers operation sets system properties on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_http_headers_request( - url=self._config.url, - timeout=timeout, - blob_cache_control=_blob_cache_control, - blob_content_type=_blob_content_type, - blob_content_md5=_blob_content_md5, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_content_disposition=_blob_content_disposition, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_immutability_policy( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Immutability Policy operation sets the immutability policy on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_immutability_policy_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - if_unmodified_since=_if_unmodified_since, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """The Delete Immutability Policy operation deletes the immutability policy on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_immutability_policy_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_legal_hold( # pylint: disable=inconsistent-return-statements - self, legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """The Set Legal Hold operation sets a legal hold on the blob. - - :param legal_hold: Specified if a legal hold should be set on the blob. Required. - :type legal_hold: bool - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_legal_hold_request( - url=self._config.url, - legal_hold=legal_hold, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_metadata( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or - more name-value pairs. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. 
- :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_metadata_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - 
response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def acquire_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a - lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. Default value is None. - :type duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_acquire_lease_request( - url=self._config.url, - timeout=timeout, - duration=duration, - proposed_lease_id=proposed_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def release_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. 
- - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_release_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, 
response_headers) # type: ignore - - @distributed_trace_async - async def renew_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_renew_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - 
response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def change_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Required. - :type proposed_lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_change_lease_request( - url=self._config.url, - lease_id=lease_id, - proposed_lease_id=proposed_lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def break_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - break_period: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. 
- - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param break_period: For a break operation, proposed duration the lease should continue before - it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining on the lease is used. A new - lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a - fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease - breaks immediately. Default value is None. - :type break_period: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_break_lease_request( - url=self._config.url, - timeout=timeout, - break_period=break_period, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def create_snapshot( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create Snapshot operation creates a read-only snapshot of a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_create_snapshot_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def start_copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - seal_blob: Optional[bool] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Start Copy From URL operation copies a blob or an internet resource to a new blob. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. Known values are: "High" and "Standard". Default value is None. - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. 
- :type blob_tags_string: str - :param seal_blob: Overrides the sealed state of the destination blob. Service version - 2019-12-12 and newer. Default value is None. - :type seal_blob: bool - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_tags = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_tags = source_modified_access_conditions.source_if_tags - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_start_copy_from_url_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - metadata=metadata, - tier=tier, - rehydrate_priority=rehydrate_priority, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_tags=_source_if_tags, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - 
lease_id=_lease_id, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - seal_blob=seal_blob, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not - return a response until the copy is complete. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. 
- :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be - copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and - "COPY". Default value is None. - :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_scope_info: Parameter group. Default value is None. 
- :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - x_ms_requires_sync: Literal["true"] = kwargs.pop( - "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true") - ) - cls: ClsType[None] = kwargs.pop("cls", None) - - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - _encryption_scope = None - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - - _request = build_copy_from_url_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - metadata=metadata, - tier=tier, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - source_content_md5=source_content_md5, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - copy_source_authorization=copy_source_authorization, - encryption_scope=_encryption_scope, - copy_source_tags=copy_source_tags, - x_ms_requires_sync=x_ms_requires_sync, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a - destination blob with zero length and full metadata. - - :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. Required. - :type copy_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) - copy_action_abort_constant: Literal["abort"] = kwargs.pop( - "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") - ) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_abort_copy_from_url_request( - url=self._config.url, - copy_id=copy_id, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - comp=comp, - copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_tier( # pylint: disable=inconsistent-return-statements - self, - tier: Union[str, _models.AccessTierRequired], - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a - premium storage account and on a block blob in a blob storage account (locally redundant - storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of - the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not - update the blob's ETag. - - :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", - "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". - Required. 
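Editor's note: continuing the copy sketch above, abort_copy_from_url only succeeds while the copy is still pending, and it needs the id captured from the x-ms-copy-id response header. A minimal sketch under the same import and auth assumptions:

from azure.storage.blob._generated.aio import AzureBlobStorage  # assumed path, as above

async def abort_copy(dest_blob_url: str, copy_id: str) -> None:
    async with AzureBlobStorage(url=dest_blob_url) as client:
        # Per the docstring above, this leaves a zero-length destination blob with full metadata.
        await client.blob.abort_copy_from_url(copy_id=copy_id)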
- :type tier: str or ~azure.storage.blob.models.AccessTierRequired - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. Known values are: "High" and "Standard". Default value is None. - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - - _request = build_set_tier_request( - url=self._config.url, - tier=tier, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - rehydrate_priority=rehydrate_priority, - request_id_parameter=request_id_parameter, - lease_id=_lease_id, - if_tags=_if_tags, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", 
response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def query( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: 
Optional[_models.CpkInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - query_request: Optional[_models.QueryRequest] = None, - **kwargs: Any - ) -> AsyncIterator[bytes]: - """The Query operation enables users to select/project on blob data by providing simple query - expressions. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param query_request: the query request. Default value is None. - :type query_request: ~azure.storage.blob.models.QueryRequest - :return: AsyncIterator[bytes] or the result of cls(response) - :rtype: AsyncIterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if query_request is not None: - _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) - else: - _content = None - - _request = build_query_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - 
encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - 
response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - 
response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_tags( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> _models.BlobTags: - """The Get Tags operation enables users to get the tags associated with a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: BlobTags or the result of cls(response) - :rtype: ~azure.storage.blob.models.BlobTags - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) - - _if_tags = None - _lease_id = None - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_tags_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - snapshot=snapshot, - version_id=version_id, - if_tags=_if_tags, - lease_id=_lease_id, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("BlobTags", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def set_tags( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - version_id: Optional[str] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - tags: Optional[_models.BlobTags] = None, - **kwargs: Any - ) -> None: - """The Set Tags operation enables users to set tags on a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. 
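Editor's note: get_tags, completed above, and set_tags, whose docstring continues below, round-trip the BlobTags XML model. The blob_tag_set / BlobTag(key=..., value=...) shape is an assumption about the generated models:

from azure.storage.blob._generated import models as _models          # assumed path
from azure.storage.blob._generated.aio import AzureBlobStorage       # assumed path

async def tag_roundtrip(blob_url: str) -> None:
    async with AzureBlobStorage(url=blob_url) as client:
        new_tags = _models.BlobTags(blob_tag_set=[_models.BlobTag(key="project", value="alpha")])
        await client.blob.set_tags(tags=new_tags)
        fetched = await client.blob.get_tags()
        print([(t.key, t.value) for t in fetched.blob_tag_set])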
- :type version_id: str - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param tags: Blob tags. Default value is None. - :type tags: ~azure.storage.blob.models.BlobTags - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_tags = None - _lease_id = None - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if tags is not None: - _content = self._serialize.body(tags, "BlobTags", is_xml=True) - else: - _content = None - - _request = build_set_tags_request( - url=self._config.url, - timeout=timeout, - version_id=version_id, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - request_id_parameter=request_id_parameter, - if_tags=_if_tags, - lease_id=_lease_id, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if 
cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py deleted file mode 100644 index d833c25c0eec..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ /dev/null @@ -1,1130 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._block_blob_operations import ( - build_commit_block_list_request, - build_get_block_list_request, - build_put_blob_from_url_request, - build_stage_block_from_url_request, - build_stage_block_request, - build_upload_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class BlockBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`block_blob` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def upload( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - transactional_content_crc64: Optional[bytes] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Block Blob operation updates the content of an existing block blob. Updating an - existing block blob overwrites any existing metadata on the blob. Partial updates are not - supported with Put Blob; the content of the existing blob is overwritten with the content of - the new blob. To perform a partial update of the content of a block blob, use the Put Block - List operation. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - 
_encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_upload_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - transactional_content_crc64=transactional_content_crc64, - blob_type=blob_type, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - 
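Editor's note: the upload operation above puts an entire block blob in a single request, always as a complete overwrite, never a partial update. A minimal sketch; the block_blob attribute name comes from the class docstring above, while the URL, metadata, and client construction are hypothetical:

import io

from azure.storage.blob._generated.aio import AzureBlobStorage  # assumed path

async def upload_bytes(blob_url: str, data: bytes) -> None:
    async with AzureBlobStorage(url=blob_url) as client:
        await client.block_blob.upload(
            content_length=len(data),
            body=io.BytesIO(data),
            metadata={"origin": "sketch"},  # hypothetical name-value pair
            tier="Hot",
        )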
@distributed_trace_async - async def put_blob_from_url( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - copy_source: str, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - copy_source_blob_properties: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are - read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial - updates are not supported with Put Blob from URL; the content of an existing blob is - overwritten with the content of the new blob. To perform partial updates to a block blob’s - contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. - - :param content_length: The length of the request. Required. - :type content_length: int - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. 
Default - value is None. - :type request_id_parameter: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. Default value is None. - :type copy_source_blob_properties: bool - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be - copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and - "COPY". Default value is None. - :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. 
- :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_tags = source_modified_access_conditions.source_if_tags - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_put_blob_from_url_request( - url=self._config.url, - content_length=content_length, - copy_source=copy_source, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - 
encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_tags=_source_if_tags, - request_id_parameter=request_id_parameter, - source_content_md5=source_content_md5, - blob_tags_string=blob_tags_string, - copy_source_blob_properties=copy_source_blob_properties, - copy_source_authorization=copy_source_authorization, - copy_source_tags=copy_source_tags, - blob_type=blob_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def stage_block( # pylint: disable=inconsistent-return-statements - self, - block_id: str, - content_length: int, - body: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """The Stage Block operation creates a new block to be committed as part of a blob. - - :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the - string must be less than or equal to 64 bytes in size. 
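Editor's note: put_blob_from_url, completed above, creates the whole destination block blob from a source URL and carries no request body, so content_length is 0. A hedged sketch under the same assumptions as the earlier examples:

from azure.storage.blob._generated.aio import AzureBlobStorage  # assumed path

async def create_from_url(dest_blob_url: str, source_blob_url: str) -> None:
    async with AzureBlobStorage(url=dest_blob_url) as client:
        await client.block_blob.put_blob_from_url(
            content_length=0,                   # no body: the service reads bytes from copy_source
            copy_source=source_blob_url,        # must be public or SAS-authenticated, per the docstring
            copy_source_blob_properties=True,   # the documented default, made explicit
        )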
For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. Required. - :type block_id: str - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - _content = body - - _request = build_stage_block_request( - url=self._config.url, - block_id=block_id, - content_length=content_length, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access 
- _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def stage_block_from_url( # pylint: disable=inconsistent-return-statements - self, - block_id: str, - content_length: int, - source_url: str, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Stage Block operation creates a new block to be committed as part of a blob where the - contents are read from a URL. - - :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the - string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. Required. - :type block_id: str - :param content_length: The length of the request. Required. - :type content_length: int - :param source_url: Specify a URL to the copy source. Required. - :type source_url: str - :param source_range: Bytes of source data in the specified range. Default value is None. - :type source_range: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. Default value is None. - :type source_contentcrc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
- :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_stage_block_from_url_request( - url=self._config.url, - block_id=block_id, - content_length=content_length, - source_url=source_url, - source_range=source_range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def commit_block_list( # pylint: disable=inconsistent-return-statements - self, - blocks: _models.BlockLookupList, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Commit Block List operation writes a blob by specifying the list of block IDs that make up - the blob. In order to be written as part of a blob, a block must have been successfully written - to the server in a prior Put Block operation. You can call Put Block List to update a blob by - uploading only those blocks that have changed, then committing the new and existing blocks - together. You can do this by specifying whether to commit a block from the committed block list - or from the uncommitted block list, or to commit the most recently uploaded version of the - block, whichever list it may belong to. - - :param blocks: Blob Blocks. Required. - :type blocks: ~azure.storage.blob.models.BlockLookupList - :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) - - _request = build_commit_block_list_request( - url=self._config.url, - timeout=timeout, - blob_cache_control=_blob_cache_control, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - comp=comp, - 
content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_block_list( - self, - snapshot: Optional[str] = None, - list_type: Union[str, _models.BlockListType] = "committed", - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.BlockList: - """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a - block blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and - "all". Default value is "committed". - :type list_type: str or ~azure.storage.blob.models.BlockListType - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
- :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: BlockList or the result of cls(response) - :rtype: ~azure.storage.blob.models.BlockList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) - - _lease_id = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - - _request = build_get_block_list_request( - url=self._config.url, - snapshot=snapshot, - list_type=list_type, - timeout=timeout, - lease_id=_lease_id, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("BlockList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py deleted file mode 100644 index 48a1a14749c7..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py +++ /dev/null @@ -1,1797 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._container_operations import ( - build_acquire_lease_request, - build_break_lease_request, - build_change_lease_request, - build_create_request, - build_delete_request, - build_filter_blobs_request, - build_get_access_policy_request, - build_get_account_info_request, - build_get_properties_request, - build_list_blob_flat_segment_request, - build_list_blob_hierarchy_segment_request, - build_release_lease_request, - build_rename_request, - build_renew_lease_request, - build_restore_request, - build_set_access_policy_request, - build_set_metadata_request, - build_submit_batch_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class ContainerOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`container` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def create( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - request_id_parameter: Optional[str] = None, - container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """creates a new container under the specified account. If the container with the same name - already exists, the operation fails. 
- - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param access: Specifies whether data in the container may be accessed publicly and the level - of access. Known values are: "container" and "blob". Default value is None. - :type access: str or ~azure.storage.blob.models.PublicAccessType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. Default value is None. - :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _default_encryption_scope = None - _prevent_encryption_scope_override = None - if container_cpk_scope_info is not None: - _default_encryption_scope = container_cpk_scope_info.default_encryption_scope - _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - - _request = build_create_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - access=access, - request_id_parameter=request_id_parameter, - default_encryption_scope=_default_encryption_scope, - prevent_encryption_scope_override=_prevent_encryption_scope_override, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_properties( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """returns all user-defined metadata and system properties for the specified container. The data - returned does not include the container's list of blobs. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_properties_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - 
response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-public-access"] = self._deserialize( - "str", response.headers.get("x-ms-blob-public-access") - ) - response_headers["x-ms-has-immutability-policy"] = self._deserialize( - "bool", response.headers.get("x-ms-has-immutability-policy") - ) - response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) - response_headers["x-ms-default-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-default-encryption-scope") - ) - response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( - "bool", response.headers.get("x-ms-deny-encryption-scope-override") - ) - response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( - "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """operation marks the specified container for deletion. The container and any blobs contained - within it are later deleted during garbage collection. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_metadata( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """operation sets one or more user-defined name-value pairs for the specified container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. 
If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - - _request = build_set_metadata_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - metadata=metadata, - if_modified_since=_if_modified_since, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - 
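For orientation, the generated `set_metadata` and `get_access_policy` operations above are normally reached through the hand-written async `ContainerClient` rather than called directly. The following is a minimal usage sketch against the released `azure.storage.blob.aio` public API, not code from this patch; the connection string and container name are placeholders.

import asyncio

from azure.storage.blob.aio import ContainerClient


async def main() -> None:
    # Placeholder connection string and container name, assumed for illustration.
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container"
    )
    async with container:
        # Delegates to the generated container.set_metadata operation shown above.
        await container.set_container_metadata(metadata={"project": "docs"})

        # Delegates to the generated container.get_access_policy operation and
        # surfaces the public-access level plus any stored access policies.
        policy = await container.get_container_access_policy()
        print(policy["public_access"], policy["signed_identifiers"])


asyncio.run(main())

On failure, both calls raise ~azure.core.exceptions.HttpResponseError, matching the status-code error mapping in the generated operations they wrap.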
@distributed_trace_async - async def get_access_policy( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> List[_models.SignedIdentifier]: - """gets the permissions for the specified container. The permissions indicate whether container - data may be accessed publicly. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: list of SignedIdentifier or the result of cls(response) - :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_access_policy_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-blob-public-access"] = self._deserialize( - "str", response.headers.get("x-ms-blob-public-access") - ) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("[SignedIdentifier]", 
pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def set_access_policy( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - container_acl: Optional[List[_models.SignedIdentifier]] = None, - **kwargs: Any - ) -> None: - """sets the permissions for the specified container. The permissions indicate whether blobs in a - container may be accessed publicly. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param access: Specifies whether data in the container may be accessed publicly and the level - of access. Known values are: "container" and "blob". Default value is None. - :type access: str or ~azure.storage.blob.models.PublicAccessType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param container_acl: the acls for the container. Default value is None. 
- :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} - if container_acl is not None: - _content = self._serialize.body( - container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt - ) - else: - _content = None - - _request = build_set_access_policy_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - access=access, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def restore( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - deleted_container_name: Optional[str] = None, - deleted_container_version: Optional[str] = None, - **kwargs: Any - ) -> None: - """Restores a 
previously-deleted container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. Default value is None. - :type deleted_container_name: str - :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. Default value is None. - :type deleted_container_version: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_restore_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - deleted_container_name=deleted_container_name, - deleted_container_version=deleted_container_version, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def rename( # pylint: disable=inconsistent-return-statements - self, - source_container_name: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - source_lease_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """Renames an existing container. - - :param source_container_name: Specifies the name of the container to rename. - Required. - :type source_container_name: str - :param timeout: The timeout parameter is expressed in seconds.
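# --- Editorial example (not part of the diff) ---------------------------------------
# Sketch of the restore operation via the public BlobServiceClient.undelete_container;
# container soft delete must be enabled on the account, and the name/version values
# below are hypothetical placeholders.
import asyncio
from azure.storage.blob.aio import BlobServiceClient

CONN_STR = "<hypothetical-connection-string>"

async def main() -> None:
    async with BlobServiceClient.from_connection_string(CONN_STR) as service:
        await service.undelete_container("deleted-container", "01D60F8BB59A4652")

asyncio.run(main())
# -------------------------------------------------------------------------------------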
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. Default value is None. - :type source_lease_id: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_rename_request( - url=self._config.url, - source_container_name=source_container_name, - timeout=timeout, - request_id_parameter=request_id_parameter, - source_lease_id=source_lease_id, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def submit_batch( - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterator[bytes]: - """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
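# --- Editorial example (not part of the diff) ---------------------------------------
# Container rename has no stable public wrapper, so this sketch calls the generated
# operation directly; `ops` is assumed to be the async ContainerOperations instance
# this diff defines, already wired to a pipeline whose URL targets the new name.
async def rename_container(ops) -> None:
    # source_lease_id is only needed when the source container holds an active lease.
    await ops.rename("source-container", source_lease_id=None)
# -------------------------------------------------------------------------------------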
- :type request_id_parameter: str - :return: AsyncIterator[bytes] or the result of cls(response) - :rtype: AsyncIterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: str = kwargs.pop( - "multipart_content_type", _headers.pop("Content-Type", "application/xml") - ) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _content = body - - _request = build_submit_batch_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - multipart_content_type=multipart_content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def filter_blobs( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any - ) -> _models.FilterBlobSegment: - """The Filter Blobs operation enables callers to list blobs in a container whose tags match a - given search expression. Filter blobs searches within the given container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
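# --- Editorial example (not part of the diff) ---------------------------------------
# submit_batch is the transport beneath the public batch helpers; ContainerClient's
# delete_blobs, for instance, packs several sub-requests into one HTTP round trip.
# The connection string and blob names are hypothetical placeholders.
import asyncio
from azure.storage.blob.aio import ContainerClient

CONN_STR = "<hypothetical-connection-string>"

async def main() -> None:
    async with ContainerClient.from_connection_string(CONN_STR, "mycontainer") as container:
        await container.delete_blobs("logs/day1.txt", "logs/day2.txt")

asyncio.run(main())
# -------------------------------------------------------------------------------------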
- :type request_id_parameter: str - :param where: Filters the results to return only blobs whose tags match the - specified expression. Default value is None. - :type where: str - :param marker: A string value that identifies the portion of the list of blobs to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all blobs remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of blobs to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. - :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] - :return: FilterBlobSegment or the result of cls(response) - :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) - - _request = build_filter_blobs_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - where=where, - marker=marker, - maxresults=maxresults, - include=include, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123",
response.headers.get("Date")) - - deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def acquire_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a - lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. Default value is None. - :type duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_acquire_lease_request( - url=self._config.url, - timeout=timeout, - duration=duration, - proposed_lease_id=proposed_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def release_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. 
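# --- Editorial example (not part of the diff) ---------------------------------------
# acquire_lease above is wrapped by ContainerClient.acquire_lease, which returns a
# BlobLeaseClient tracking the lease ID; 15 seconds is the documented minimum duration.
import asyncio
from azure.storage.blob.aio import ContainerClient

CONN_STR = "<hypothetical-connection-string>"

async def main() -> None:
    async with ContainerClient.from_connection_string(CONN_STR, "mycontainer") as container:
        lease = await container.acquire_lease(lease_duration=15)
        print(lease.id)

asyncio.run(main())
# -------------------------------------------------------------------------------------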
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_release_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def renew_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - 
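# --- Editorial example (not part of the diff) ---------------------------------------
# Releasing (per release_lease above) hands the lease back immediately; the
# BlobLeaseClient obtained from acquire_lease in the previous sketch already carries
# the required lease ID.
async def release_container_lease(lease) -> None:
    await lease.release()
# -------------------------------------------------------------------------------------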
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_renew_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) 
- - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def break_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - break_period: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param break_period: For a break operation, proposed duration the lease should continue before - it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining on the lease is used. A new - lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a - fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease - breaks immediately. Default value is None. - :type break_period: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_break_lease_request( - url=self._config.url, - timeout=timeout, - break_period=break_period, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - 
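# --- Editorial example (not part of the diff) ---------------------------------------
# Renewing (per renew_lease above) restarts the clock on a finite lease; it succeeds
# only while the lease ID held by the BlobLeaseClient is still the active one.
async def keep_lease_alive(lease) -> None:
    await lease.renew()
# -------------------------------------------------------------------------------------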
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def change_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Required. - :type proposed_lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
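# --- Editorial example (not part of the diff) ---------------------------------------
# Breaking (per break_lease above) ends a lease without knowing its ID; a
# BlobLeaseClient can be constructed straight from a ContainerClient for this, and the
# optional break period caps how long the current holder keeps the lease.
from azure.storage.blob.aio import BlobLeaseClient

async def force_break(container) -> None:
    await BlobLeaseClient(client=container).break_lease(lease_break_period=10)
# -------------------------------------------------------------------------------------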
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_change_lease_request( - url=self._config.url, - lease_id=lease_id, - proposed_lease_id=proposed_lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def list_blob_flat_segment( - self, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListBlobsFlatSegmentResponse: - """[Update] The List Blobs operation returns a list of the blobs under the specified container. - - :param prefix: Filters the results to return only blobs whose name begins with the - specified prefix. Default value is None.
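# --- Editorial example (not part of the diff) ---------------------------------------
# Changing (per change_lease above) swaps the active lease ID for a caller-proposed
# GUID, which must be in valid GUID string format per the docstring.
import uuid

async def rotate_lease_id(lease) -> None:
    await lease.change(proposed_lease_id=str(uuid.uuid4()))
# -------------------------------------------------------------------------------------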
- :type prefix: str - :param marker: A string value that identifies the portion of the list of blobs to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all blobs remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of blobs to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. - :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: ListBlobsFlatSegmentResponse or the result of cls(response) - :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_blob_flat_segment_request( - url=self._config.url, - prefix=prefix, - marker=marker, - maxresults=maxresults, - include=include, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) -
response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def list_blob_hierarchy_segment( - self, - delimiter: str, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListBlobsHierarchySegmentResponse: - """[Update] The List Blobs operation returns a list of the blobs under the specified container. - - :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix - element in the response body that acts as a placeholder for all blobs whose names begin with - the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. Required. - :type delimiter: str - :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. Default value is None. - :type prefix: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. - :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: ListBlobsHierarchySegmentResponse or the result of cls(response) - :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_blob_hierarchy_segment_request( - url=self._config.url, - delimiter=delimiter, - prefix=prefix, - marker=marker, - maxresults=maxresults, - include=include, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
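# --- Editorial example (not part of the diff) ---------------------------------------
# Hierarchical listing (list_blob_hierarchy_segment above) surfaces as
# ContainerClient.walk_blobs; with a "/" delimiter, virtual directories come back as
# BlobPrefix items alongside ordinary blobs.
import asyncio
from azure.storage.blob.aio import ContainerClient

CONN_STR = "<hypothetical-connection-string>"

async def main() -> None:
    async with ContainerClient.from_connection_string(CONN_STR, "mycontainer") as container:
        async for item in container.walk_blobs(delimiter="/"):
            print(item.name)

asyncio.run(main())
# -------------------------------------------------------------------------------------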
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py deleted file mode 100644 index bf77639fe40e..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ /dev/null @@ -1,1433 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
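# --- Editorial example (not part of the diff) ---------------------------------------
# get_account_info above maps to ContainerClient.get_account_information, whose result
# dictionary exposes the SKU and account kind taken from the response headers.
import asyncio
from azure.storage.blob.aio import ContainerClient

CONN_STR = "<hypothetical-connection-string>"

async def main() -> None:
    async with ContainerClient.from_connection_string(CONN_STR, "mycontainer") as container:
        info = await container.get_account_information()
        print(info["sku_name"], info["account_kind"])

asyncio.run(main())
# -------------------------------------------------------------------------------------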
-# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._page_blob_operations import ( - build_clear_pages_request, - build_copy_incremental_request, - build_create_request, - build_get_page_ranges_diff_request, - build_get_page_ranges_request, - build_resize_request, - build_update_sequence_number_request, - build_upload_pages_from_url_request, - build_upload_pages_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class PageBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`page_blob` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def create( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - blob_content_length: int, - timeout: Optional[int] = None, - tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, - metadata: Optional[Dict[str, str]] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create operation creates a new page blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. Required. - :type blob_content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param tier: Optional. Indicates the tier to be set on the page blob. 
Known values are: "P4", - "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. - :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled - value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. Default value is 0. - :type blob_sequence_number: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_create_request( - url=self._config.url, - content_length=content_length, - blob_content_length=blob_content_length, - timeout=timeout, - tier=tier, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_sequence_number=blob_sequence_number, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - blob_type=blob_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def upload_pages( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - body: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Pages operation writes a range of pages to a page blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. 
- :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_upload_pages_request( - url=self._config.url, - content_length=content_length, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - 
timeout=timeout, - range=range, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - page_write=page_write, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def clear_pages( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Clear Pages operation clears a set of pages from a page blob. 
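# A minimal sketch of the Upload Pages and Clear Pages operations as they surface on
# the public async BlobClient (`blob` is a placeholder client for an existing page
# blob); upload_page maps to page_write="update", clear_page to page_write="clear",
# and both ranges must be 512-byte aligned.
async def page_write_example(blob) -> None:
    data = b"\x01" * 512
    # Write one 512-byte page at offset 0.
    await blob.upload_page(data, offset=0, length=512)
    # Clear the same range; cleared pages read back as zeros.
    await blob.clear_page(offset=0, length=512)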
- - :param content_length: The length of the request. Required. - :type content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = 
modified_access_conditions.if_unmodified_since - - _request = build_clear_pages_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - range=range, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - page_write=page_write, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def upload_pages_from_url( # pylint: disable=inconsistent-return-statements - self, - source_url: str, - source_range: str, - content_length: int, - range: str, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Pages operation writes a range of pages to a page blob where the contents are read - from a URL. 
- - :param source_url: Specify a URL to the copy source. Required. - :type source_url: str - :param source_range: Bytes of source data in the specified range. The length of this range - should match the ContentLength header and x-ms-range/Range destination range header. Required. - :type source_range: str - :param content_length: The length of the request. Required. - :type content_length: int - :param range: The range of bytes to which the source range would be written. The range should - be 512 aligned and range-end is required. Required. - :type range: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. Default value is None. - :type source_contentcrc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. 
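# A sketch of the URL-sourced variant through the public client: upload_pages_from_url
# copies a 512-byte-aligned range server-side from another blob. The source URL with
# its SAS token and the offsets/length below are placeholders.
async def pages_from_url_example(dest) -> None:
    await dest.upload_pages_from_url(
        source_url="https://<account>.blob.core.windows.net/src/disk.vhd?<sas>",
        offset=0,
        length=4096,
        source_offset=0,
    )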
- :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_upload_pages_from_url_request( - url=self._config.url, - source_url=source_url, - source_range=source_range, - content_length=content_length, - range=range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - 
if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - page_write=page_write, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_page_ranges( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.PageList: - """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot - of a page blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
- :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param marker: A string value that identifies the portion of the list of page ranges to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all page ranges remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of page ranges to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: PageList or the result of cls(response) - :rtype: ~azure.storage.blob.models.PageList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - cls: ClsType[_models.PageList] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_page_ranges_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - range=range, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - marker=marker, - maxresults=maxresults, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - -
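# For orientation, a sketch of this operation via the public async client:
# get_page_ranges returns a (page_ranges, clear_ranges) pair of lists of
# {'start': int, 'end': int} dicts (`blob` is a placeholder client).
async def page_ranges_example(blob) -> None:
    page_ranges, clear_ranges = await blob.get_page_ranges()
    for r in page_ranges:
        print(f"valid bytes {r['start']}-{r['end']}")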
_stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("PageList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_page_ranges_diff( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - prevsnapshot: Optional[str] = None, - prev_snapshot_url: Optional[str] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.PageList: - """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that - were changed between target blob and previous snapshot. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a - DateTime value that specifies that the response will contain only pages that were changed - between target blob and previous snapshot. Changed pages include both updated and cleared - pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is - the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. Default value is None. - :type prevsnapshot: str - :param prev_snapshot_url: Optional. This header is only supported in service versions - 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The - response will only contain pages that were changed between the target blob and its previous - snapshot. Default value is None. 
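# The diff flavor surfaces on the public client as the previous_snapshot_diff
# keyword of get_page_ranges; `snapshot` here is a placeholder value previously
# returned by create_snapshot.
async def page_ranges_diff_example(blob, snapshot: str) -> None:
    changed, cleared = await blob.get_page_ranges(previous_snapshot_diff=snapshot)
    print(len(changed), "ranges changed and", len(cleared), "ranges cleared since the snapshot")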
- :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param marker: A string value that identifies the portion of the list of page ranges to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all page ranges remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of page ranges to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: PageList or the result of cls(response) - :rtype: ~azure.storage.blob.models.PageList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - cls: ClsType[_models.PageList] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_page_ranges_diff_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - prevsnapshot=prevsnapshot, - prev_snapshot_url=prev_snapshot_url, - range=range, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - marker=marker, - maxresults=maxresults, - comp=comp, - version=self._config.version, - headers=_headers, - 
params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("PageList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def resize( # pylint: disable=inconsistent-return-statements - self, - blob_content_length: int, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Resize the Blob. - - :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. Required. - :type blob_content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
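# Through the public client, resize is a single call; the new size must again be a
# multiple of 512 bytes (sketch with a placeholder client).
async def resize_example(blob) -> None:
    await blob.resize_blob(size=8 * 1024 * 1024)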
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_resize_request( - url=self._config.url, - blob_content_length=blob_content_length, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def update_sequence_number( # pylint: disable=inconsistent-return-statements - self, - sequence_number_action: Union[str, _models.SequenceNumberActionType], - timeout: Optional[int] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Update the sequence number of the blob. - - :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the - request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. Known values are: "max", "update", and "increment". - Required. - :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled - value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. Default value is 0. - :type blob_sequence_number: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_update_sequence_number_request( - url=self._config.url, - sequence_number_action=sequence_number_action, - timeout=timeout, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_sequence_number=blob_sequence_number, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def copy_incremental( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Copy Incremental operation copies a snapshot 
of the source page blob to a destination page - blob. The snapshot is copied such that only the differential changes between the previously - copied snapshot are transferred to the destination. The copied snapshots are complete copies of - the original snapshot and can be read or copied from as usual. This API is supported since REST - version 2016-05-31. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_copy_incremental_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py deleted file mode 100644 index f9f8ff0be86c..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py +++ /dev/null @@ -1,745 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ...operations._service_operations import ( - build_filter_blobs_request, - build_get_account_info_request, - build_get_properties_request, - build_get_statistics_request, - build_get_user_delegation_key_request, - build_list_containers_segment_request, - build_set_properties_request, - build_submit_batch_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class ServiceOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.aio.AzureBlobStorage`'s - :attr:`service` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def set_properties( # pylint: disable=inconsistent-return-statements - self, - storage_service_properties: _models.StorageServiceProperties, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets properties for a storage account's Blob service endpoint, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param storage_service_properties: The StorageService properties. Required. - :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) - - _request = build_set_properties_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = 
self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_properties( - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> _models.StorageServiceProperties: - """gets the properties of a storage account's Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: StorageServiceProperties or the result of cls(response) - :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_properties_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_statistics( - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> _models.StorageServiceStats: - """Retrieves statistics related to replication for the Blob service. It is only available on the - secondary location endpoint when read-access geo-redundant replication is enabled for the - storage account. 
- - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: StorageServiceStats or the result of cls(response) - :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) - - _request = build_get_statistics_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def list_containers_segment( - self, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListContainersSegmentResponse: - """The List Containers Segment operation returns a list of the containers under the specified - account. - - :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. Default value is None. - :type prefix: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. 
The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. Default value is None. - :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: ListContainersSegmentResponse or the result of cls(response) - :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_containers_segment_request( - url=self._config.url, - prefix=prefix, - marker=marker, - maxresults=maxresults, - include=include, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - 
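# The marker/NextMarker handshake documented above surfaces in the public API
# as continuation tokens on a pager. A sketch (connection string and page size
# are placeholders):
from azure.storage.blob import BlobServiceClient

client = BlobServiceClient.from_connection_string("<connection-string>")

pages = client.list_containers(results_per_page=100).by_page()
for container in next(pages):
    print(container.name)

token = pages.continuation_token  # the opaque marker value described above
if token:
    resumed = client.list_containers(results_per_page=100).by_page(continuation_token=token)
    for container in next(resumed):
        print(container.name)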
deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_user_delegation_key( - self, - key_info: _models.KeyInfo, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.UserDelegationKey: - """Retrieves a user delegation key for the Blob service. This is only a valid operation when using - bearer token authentication. - - :param key_info: Key information. Required. - :type key_info: ~azure.storage.blob.models.KeyInfo - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: UserDelegationKey or the result of cls(response) - :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) - - _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) - - _request = build_get_user_delegation_key_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, 
response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def submit_batch( - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterator[bytes]: - """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - - :param content_length: The length of the request. Required. 
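The public helpers assemble this multipart body on the caller's behalf; deleting several blobs in one round trip is the typical use. A sketch (container and blob names are placeholders):

from azure.storage.blob import BlobServiceClient

client = BlobServiceClient.from_connection_string("<connection-string>")  # placeholder
container = client.get_container_client("my-container")  # placeholder name

# One multipart batch request (the submit_batch operation above) instead of
# three separate DELETE calls.
container.delete_blobs("blob-1", "blob-2", "blob-3")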
- :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: AsyncIterator[bytes] or the result of cls(response) - :rtype: AsyncIterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: str = kwargs.pop( - "multipart_content_type", _headers.pop("Content-Type", "application/xml") - ) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _content = body - - _request = build_submit_batch_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - multipart_content_type=multipart_content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def filter_blobs( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any - ) -> _models.FilterBlobSegment: - """The Filter Blobs operation enables callers to list blobs across all containers whose tags match - a given search expression. 
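The public surface exposes this operation as find_blobs_by_tags. A sketch, with an illustrative tag expression:

from azure.storage.blob import BlobServiceClient

client = BlobServiceClient.from_connection_string("<connection-string>")  # placeholder

# The expression uses the where-clause syntax described below; the tag name
# and value here are illustrative.
for blob in client.find_blobs_by_tags("project='contoso'"):
    print(blob.name, blob.container_name)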
Filter blobs searches across all containers within a storage - account but can be scoped within the expression to a single container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param where: Filters the results to return only blobs whose tags match the - specified expression. Default value is None. - :type where: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None.
- :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] - :return: FilterBlobSegment or the result of cls(response) - :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) - - _request = build_filter_blobs_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - where=where, - marker=marker, - maxresults=maxresults, - include=include, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py deleted file mode 100644 index 63ca7e23fc24..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py +++ /dev/null @@ -1,173 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from ._models_py3 import AccessPolicy -from ._models_py3 import AppendPositionAccessConditions -from ._models_py3 import ArrowConfiguration -from ._models_py3 import ArrowField -from ._models_py3 import BlobFlatListSegment -from ._models_py3 import BlobHTTPHeaders -from ._models_py3 import BlobHierarchyListSegment -from ._models_py3 import BlobItemInternal -from ._models_py3 import BlobMetadata -from ._models_py3 import BlobName -from ._models_py3 import BlobPrefix -from ._models_py3 import BlobPropertiesInternal -from ._models_py3 import BlobTag -from ._models_py3 import BlobTags -from ._models_py3 import Block -from ._models_py3 import BlockList -from ._models_py3 import BlockLookupList -from ._models_py3 import ClearRange -from ._models_py3 import ContainerCpkScopeInfo -from ._models_py3 import ContainerItem -from ._models_py3 import ContainerProperties -from ._models_py3 import CorsRule -from ._models_py3 import CpkInfo -from ._models_py3 import CpkScopeInfo -from ._models_py3 import DelimitedTextConfiguration -from ._models_py3 import FilterBlobItem -from ._models_py3 import FilterBlobSegment -from ._models_py3 import GeoReplication -from ._models_py3 import JsonTextConfiguration -from ._models_py3 import KeyInfo -from ._models_py3 import LeaseAccessConditions -from ._models_py3 import ListBlobsFlatSegmentResponse -from ._models_py3 import ListBlobsHierarchySegmentResponse -from ._models_py3 import ListContainersSegmentResponse -from ._models_py3 import Logging -from ._models_py3 import Metrics -from ._models_py3 import ModifiedAccessConditions -from ._models_py3 import PageList -from ._models_py3 import PageRange -from ._models_py3 import QueryFormat -from ._models_py3 import QueryRequest -from ._models_py3 import QuerySerialization -from ._models_py3 import RetentionPolicy -from ._models_py3 import SequenceNumberAccessConditions -from ._models_py3 import SignedIdentifier -from ._models_py3 import SourceModifiedAccessConditions -from ._models_py3 import StaticWebsite -from ._models_py3 import StorageError -from ._models_py3 import StorageServiceProperties -from ._models_py3 import StorageServiceStats -from ._models_py3 import UserDelegationKey - -from ._azure_blob_storage_enums import AccessTier -from ._azure_blob_storage_enums import AccessTierOptional -from ._azure_blob_storage_enums import AccessTierRequired -from ._azure_blob_storage_enums import AccountKind -from ._azure_blob_storage_enums import ArchiveStatus -from ._azure_blob_storage_enums import BlobCopySourceTags -from ._azure_blob_storage_enums import BlobExpiryOptions -from ._azure_blob_storage_enums import BlobImmutabilityPolicyMode -from ._azure_blob_storage_enums import BlobType -from ._azure_blob_storage_enums import BlockListType -from ._azure_blob_storage_enums import CopyStatusType -from ._azure_blob_storage_enums import DeleteSnapshotsOptionType -from ._azure_blob_storage_enums import EncryptionAlgorithmType -from ._azure_blob_storage_enums import FilterBlobsIncludeItem -from ._azure_blob_storage_enums import GeoReplicationStatusType -from ._azure_blob_storage_enums import LeaseDurationType -from ._azure_blob_storage_enums import LeaseStateType -from ._azure_blob_storage_enums import LeaseStatusType -from ._azure_blob_storage_enums import ListBlobsIncludeItem -from ._azure_blob_storage_enums import ListContainersIncludeType -from ._azure_blob_storage_enums import PremiumPageBlobAccessTier -from ._azure_blob_storage_enums import 
PublicAccessType -from ._azure_blob_storage_enums import QueryFormatType -from ._azure_blob_storage_enums import RehydratePriority -from ._azure_blob_storage_enums import SequenceNumberActionType -from ._azure_blob_storage_enums import SkuName -from ._azure_blob_storage_enums import StorageErrorCode -from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "AccessPolicy", - "AppendPositionAccessConditions", - "ArrowConfiguration", - "ArrowField", - "BlobFlatListSegment", - "BlobHTTPHeaders", - "BlobHierarchyListSegment", - "BlobItemInternal", - "BlobMetadata", - "BlobName", - "BlobPrefix", - "BlobPropertiesInternal", - "BlobTag", - "BlobTags", - "Block", - "BlockList", - "BlockLookupList", - "ClearRange", - "ContainerCpkScopeInfo", - "ContainerItem", - "ContainerProperties", - "CorsRule", - "CpkInfo", - "CpkScopeInfo", - "DelimitedTextConfiguration", - "FilterBlobItem", - "FilterBlobSegment", - "GeoReplication", - "JsonTextConfiguration", - "KeyInfo", - "LeaseAccessConditions", - "ListBlobsFlatSegmentResponse", - "ListBlobsHierarchySegmentResponse", - "ListContainersSegmentResponse", - "Logging", - "Metrics", - "ModifiedAccessConditions", - "PageList", - "PageRange", - "QueryFormat", - "QueryRequest", - "QuerySerialization", - "RetentionPolicy", - "SequenceNumberAccessConditions", - "SignedIdentifier", - "SourceModifiedAccessConditions", - "StaticWebsite", - "StorageError", - "StorageServiceProperties", - "StorageServiceStats", - "UserDelegationKey", - "AccessTier", - "AccessTierOptional", - "AccessTierRequired", - "AccountKind", - "ArchiveStatus", - "BlobCopySourceTags", - "BlobExpiryOptions", - "BlobImmutabilityPolicyMode", - "BlobType", - "BlockListType", - "CopyStatusType", - "DeleteSnapshotsOptionType", - "EncryptionAlgorithmType", - "FilterBlobsIncludeItem", - "GeoReplicationStatusType", - "LeaseDurationType", - "LeaseStateType", - "LeaseStatusType", - "ListBlobsIncludeItem", - "ListContainersIncludeType", - "PremiumPageBlobAccessTier", - "PublicAccessType", - "QueryFormatType", - "RehydratePriority", - "SequenceNumberActionType", - "SkuName", - "StorageErrorCode", -] -__all__.extend([p for p in _patch_all if p not in __all__]) -_patch_sdk() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py deleted file mode 100644 index 8fb7691ceb85..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ /dev/null @@ -1,391 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
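Every enum in the deleted module relies on azure-core's CaseInsensitiveEnumMeta, so member lookup by name ignores casing while the value keeps the service's spelling. A self-contained sketch of the pattern:

from enum import Enum

from azure.core import CaseInsensitiveEnumMeta

class Tier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Illustrative stand-in for the generated enums below."""

    HOT = "Hot"
    COOL = "Cool"

assert Tier["hot"] is Tier.HOT  # name lookup is case-insensitive
assert Tier.HOT == "Hot"        # str subclass compares equal to the wire value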
-# -------------------------------------------------------------------------- - -from enum import Enum -from azure.core import CaseInsensitiveEnumMeta - - -class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AccessTier.""" - - P4 = "P4" - P6 = "P6" - P10 = "P10" - P15 = "P15" - P20 = "P20" - P30 = "P30" - P40 = "P40" - P50 = "P50" - P60 = "P60" - P70 = "P70" - P80 = "P80" - HOT = "Hot" - COOL = "Cool" - ARCHIVE = "Archive" - PREMIUM = "Premium" - COLD = "Cold" - - -class AccessTierOptional(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AccessTierOptional.""" - - P4 = "P4" - P6 = "P6" - P10 = "P10" - P15 = "P15" - P20 = "P20" - P30 = "P30" - P40 = "P40" - P50 = "P50" - P60 = "P60" - P70 = "P70" - P80 = "P80" - HOT = "Hot" - COOL = "Cool" - ARCHIVE = "Archive" - COLD = "Cold" - - -class AccessTierRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AccessTierRequired.""" - - P4 = "P4" - P6 = "P6" - P10 = "P10" - P15 = "P15" - P20 = "P20" - P30 = "P30" - P40 = "P40" - P50 = "P50" - P60 = "P60" - P70 = "P70" - P80 = "P80" - HOT = "Hot" - COOL = "Cool" - ARCHIVE = "Archive" - COLD = "Cold" - - -class AccountKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AccountKind.""" - - STORAGE = "Storage" - BLOB_STORAGE = "BlobStorage" - STORAGE_V2 = "StorageV2" - FILE_STORAGE = "FileStorage" - BLOCK_BLOB_STORAGE = "BlockBlobStorage" - - -class ArchiveStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ArchiveStatus.""" - - REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" - REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" - REHYDRATE_PENDING_TO_COLD = "rehydrate-pending-to-cold" - - -class BlobCopySourceTags(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlobCopySourceTags.""" - - REPLACE = "REPLACE" - COPY = "COPY" - - -class BlobExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlobExpiryOptions.""" - - NEVER_EXPIRE = "NeverExpire" - RELATIVE_TO_CREATION = "RelativeToCreation" - RELATIVE_TO_NOW = "RelativeToNow" - ABSOLUTE = "Absolute" - - -class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlobImmutabilityPolicyMode.""" - - MUTABLE = "Mutable" - UNLOCKED = "Unlocked" - LOCKED = "Locked" - - -class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlobType.""" - - BLOCK_BLOB = "BlockBlob" - PAGE_BLOB = "PageBlob" - APPEND_BLOB = "AppendBlob" - - -class BlockListType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlockListType.""" - - COMMITTED = "committed" - UNCOMMITTED = "uncommitted" - ALL = "all" - - -class CopyStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """CopyStatusType.""" - - PENDING = "pending" - SUCCESS = "success" - ABORTED = "aborted" - FAILED = "failed" - - -class DeleteSnapshotsOptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """DeleteSnapshotsOptionType.""" - - INCLUDE = "include" - ONLY = "only" - - -class EncryptionAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """EncryptionAlgorithmType.""" - - NONE = "None" - AES256 = "AES256" - - -class FilterBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """FilterBlobsIncludeItem.""" - - NONE = "none" - VERSIONS = "versions" - - -class GeoReplicationStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the secondary location.""" - - LIVE = "live" - BOOTSTRAP = "bootstrap" - UNAVAILABLE = "unavailable" - - -class LeaseDurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """LeaseDurationType.""" - - INFINITE = "infinite" - FIXED = 
"fixed" - - -class LeaseStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """LeaseStateType.""" - - AVAILABLE = "available" - LEASED = "leased" - EXPIRED = "expired" - BREAKING = "breaking" - BROKEN = "broken" - - -class LeaseStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """LeaseStatusType.""" - - LOCKED = "locked" - UNLOCKED = "unlocked" - - -class ListBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListBlobsIncludeItem.""" - - COPY = "copy" - DELETED = "deleted" - METADATA = "metadata" - SNAPSHOTS = "snapshots" - UNCOMMITTEDBLOBS = "uncommittedblobs" - VERSIONS = "versions" - TAGS = "tags" - IMMUTABILITYPOLICY = "immutabilitypolicy" - LEGALHOLD = "legalhold" - DELETEDWITHVERSIONS = "deletedwithversions" - - -class ListContainersIncludeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListContainersIncludeType.""" - - METADATA = "metadata" - DELETED = "deleted" - SYSTEM = "system" - - -class PremiumPageBlobAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PremiumPageBlobAccessTier.""" - - P4 = "P4" - P6 = "P6" - P10 = "P10" - P15 = "P15" - P20 = "P20" - P30 = "P30" - P40 = "P40" - P50 = "P50" - P60 = "P60" - P70 = "P70" - P80 = "P80" - - -class PublicAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PublicAccessType.""" - - CONTAINER = "container" - BLOB = "blob" - - -class QueryFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The quick query format type.""" - - DELIMITED = "delimited" - JSON = "json" - ARROW = "arrow" - PARQUET = "parquet" - - -class RehydratePriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """If an object is in rehydrate pending state then this header is returned with priority of - rehydrate. Valid values are High and Standard. - """ - - HIGH = "High" - STANDARD = "Standard" - - -class SequenceNumberActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """SequenceNumberActionType.""" - - MAX = "max" - UPDATE = "update" - INCREMENT = "increment" - - -class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """SkuName.""" - - STANDARD_LRS = "Standard_LRS" - STANDARD_GRS = "Standard_GRS" - STANDARD_RAGRS = "Standard_RAGRS" - STANDARD_ZRS = "Standard_ZRS" - PREMIUM_LRS = "Premium_LRS" - - -class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Error codes returned by the service.""" - - ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" - ACCOUNT_BEING_CREATED = "AccountBeingCreated" - ACCOUNT_IS_DISABLED = "AccountIsDisabled" - AUTHENTICATION_FAILED = "AuthenticationFailed" - AUTHORIZATION_FAILURE = "AuthorizationFailure" - CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" - CONDITION_NOT_MET = "ConditionNotMet" - EMPTY_METADATA_KEY = "EmptyMetadataKey" - INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" - INTERNAL_ERROR = "InternalError" - INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" - INVALID_HEADER_VALUE = "InvalidHeaderValue" - INVALID_HTTP_VERB = "InvalidHttpVerb" - INVALID_INPUT = "InvalidInput" - INVALID_MD5 = "InvalidMd5" - INVALID_METADATA = "InvalidMetadata" - INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" - INVALID_RANGE = "InvalidRange" - INVALID_RESOURCE_NAME = "InvalidResourceName" - INVALID_URI = "InvalidUri" - INVALID_XML_DOCUMENT = "InvalidXmlDocument" - INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" - MD5_MISMATCH = "Md5Mismatch" - METADATA_TOO_LARGE = "MetadataTooLarge" - MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" - MISSING_REQUIRED_QUERY_PARAMETER = 
"MissingRequiredQueryParameter" - MISSING_REQUIRED_HEADER = "MissingRequiredHeader" - MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" - MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" - OPERATION_TIMED_OUT = "OperationTimedOut" - OUT_OF_RANGE_INPUT = "OutOfRangeInput" - OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" - REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" - RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" - REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" - RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" - RESOURCE_NOT_FOUND = "ResourceNotFound" - SERVER_BUSY = "ServerBusy" - UNSUPPORTED_HEADER = "UnsupportedHeader" - UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" - UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" - UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" - APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" - BLOB_ALREADY_EXISTS = "BlobAlreadyExists" - BLOB_IMMUTABLE_DUE_TO_POLICY = "BlobImmutableDueToPolicy" - BLOB_NOT_FOUND = "BlobNotFound" - BLOB_OVERWRITTEN = "BlobOverwritten" - BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" - BLOB_USES_CUSTOMER_SPECIFIED_ENCRYPTION = "BlobUsesCustomerSpecifiedEncryption" - BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" - BLOCK_LIST_TOO_LONG = "BlockListTooLong" - CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" - CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" - CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" - CONTAINER_BEING_DELETED = "ContainerBeingDeleted" - CONTAINER_DISABLED = "ContainerDisabled" - CONTAINER_NOT_FOUND = "ContainerNotFound" - CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" - COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" - COPY_ID_MISMATCH = "CopyIdMismatch" - FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" - INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" - INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" - INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" - INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" - INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" - INVALID_BLOB_TIER = "InvalidBlobTier" - INVALID_BLOB_TYPE = "InvalidBlobType" - INVALID_BLOCK_ID = "InvalidBlockId" - INVALID_BLOCK_LIST = "InvalidBlockList" - INVALID_OPERATION = "InvalidOperation" - INVALID_PAGE_RANGE = "InvalidPageRange" - INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" - INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" - INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" - LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" - LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" - LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" - LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" - LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" - LEASE_ID_MISSING = "LeaseIdMissing" - LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" - LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" - LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" - LEASE_LOST = "LeaseLost" - LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" - LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" - 
LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" - MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" - NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" - NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" - OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" - PENDING_COPY_OPERATION = "PendingCopyOperation" - PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" - PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" - PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" - SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" - SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" - SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" - SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" - SNAPSHOTS_PRESENT = "SnapshotsPresent" - SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" - SYSTEM_IN_USE = "SystemInUse" - TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" - UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" - BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" - BLOB_ARCHIVED = "BlobArchived" - BLOB_NOT_ARCHIVED = "BlobNotArchived" - AUTHORIZATION_SOURCE_IP_MISMATCH = "AuthorizationSourceIPMismatch" - AUTHORIZATION_PROTOCOL_MISMATCH = "AuthorizationProtocolMismatch" - AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch" - AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch" - AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py deleted file mode 100644 index cd88cb20487f..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py +++ /dev/null @@ -1,2772 +0,0 @@ -# coding=utf-8 -# pylint: disable=too-many-lines -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -import datetime -import sys -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union - -from .. import _serialization - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. import models as _models -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object - - -class AccessPolicy(_serialization.Model): - """An Access policy. - - :ivar start: the date-time the policy is active. - :vartype start: str - :ivar expiry: the date-time the policy expires. - :vartype expiry: str - :ivar permission: the permissions for the acl policy. 
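In the public surface this model backs stored access policies on a container. A sketch using the re-exported helpers (policy id and container name are placeholders):

from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccessPolicy, BlobServiceClient, ContainerSasPermissions

client = BlobServiceClient.from_connection_string("<connection-string>")  # placeholder
container = client.get_container_client("my-container")  # placeholder name

# Register a stored access policy; SAS tokens can then reference it by id.
policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True),
    start=datetime.now(timezone.utc),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
container.set_container_access_policy(signed_identifiers={"read-policy": policy})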
- :vartype permission: str - """ - - _attribute_map = { - "start": {"key": "Start", "type": "str"}, - "expiry": {"key": "Expiry", "type": "str"}, - "permission": {"key": "Permission", "type": "str"}, - } - - def __init__( - self, - *, - start: Optional[str] = None, - expiry: Optional[str] = None, - permission: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword start: the date-time the policy is active. - :paramtype start: str - :keyword expiry: the date-time the policy expires. - :paramtype expiry: str - :keyword permission: the permissions for the acl policy. - :paramtype permission: str - """ - super().__init__(**kwargs) - self.start = start - self.expiry = expiry - self.permission = permission - - -class AppendPositionAccessConditions(_serialization.Model): - """Parameter group. - - :ivar max_size: Optional conditional header. The max length in bytes permitted for the append - blob. If the Append Block operation would cause the blob to exceed that limit or if the blob - size is already greater than the value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :vartype max_size: int - :ivar append_position: Optional conditional header, used only for the Append Block operation. A - number indicating the byte offset to compare. Append Block will succeed only if the append - position is equal to this number. If it is not, the request will fail with the - AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). - :vartype append_position: int - """ - - _attribute_map = { - "max_size": {"key": "maxSize", "type": "int"}, - "append_position": {"key": "appendPosition", "type": "int"}, - } - - def __init__(self, *, max_size: Optional[int] = None, append_position: Optional[int] = None, **kwargs: Any) -> None: - """ - :keyword max_size: Optional conditional header. The max length in bytes permitted for the - append blob. If the Append Block operation would cause the blob to exceed that limit or if the - blob size is already greater than the value specified in this header, the request will fail - with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :paramtype max_size: int - :keyword append_position: Optional conditional header, used only for the Append Block - operation. A number indicating the byte offset to compare. Append Block will succeed only if - the append position is equal to this number. If it is not, the request will fail with the - AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). - :paramtype append_position: int - """ - super().__init__(**kwargs) - self.max_size = max_size - self.append_position = append_position - - -class ArrowConfiguration(_serialization.Model): - """Groups the settings used for formatting the response if the response should be Arrow formatted. - - All required parameters must be populated in order to send to server. - - :ivar schema: Required. - :vartype schema: list[~azure.storage.blob.models.ArrowField] - """ - - _validation = { - "schema": {"required": True}, - } - - _attribute_map = { - "schema": { - "key": "Schema", - "type": "[ArrowField]", - "xml": {"name": "Schema", "wrapped": True, "itemsName": "Field"}, - }, - } - _xml_map = {"name": "ArrowConfiguration"} - - def __init__(self, *, schema: List["_models.ArrowField"], **kwargs: Any) -> None: - """ - :keyword schema: Required. 
- :paramtype schema: list[~azure.storage.blob.models.ArrowField] - """ - super().__init__(**kwargs) - self.schema = schema - - -class ArrowField(_serialization.Model): - """Groups settings regarding specific field of an arrow schema. - - All required parameters must be populated in order to send to server. - - :ivar type: Required. - :vartype type: str - :ivar name: - :vartype name: str - :ivar precision: - :vartype precision: int - :ivar scale: - :vartype scale: int - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "Type", "type": "str"}, - "name": {"key": "Name", "type": "str"}, - "precision": {"key": "Precision", "type": "int"}, - "scale": {"key": "Scale", "type": "int"}, - } - _xml_map = {"name": "Field"} - - def __init__( - self, - *, - type: str, - name: Optional[str] = None, - precision: Optional[int] = None, - scale: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword type: Required. - :paramtype type: str - :keyword name: - :paramtype name: str - :keyword precision: - :paramtype precision: int - :keyword scale: - :paramtype scale: int - """ - super().__init__(**kwargs) - self.type = type - self.name = name - self.precision = precision - self.scale = scale - - -class BlobFlatListSegment(_serialization.Model): - """BlobFlatListSegment. - - All required parameters must be populated in order to send to server. - - :ivar blob_items: Required. - :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - - _validation = { - "blob_items": {"required": True}, - } - - _attribute_map = { - "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"itemsName": "Blob"}}, - } - _xml_map = {"name": "Blobs"} - - def __init__(self, *, blob_items: List["_models.BlobItemInternal"], **kwargs: Any) -> None: - """ - :keyword blob_items: Required. - :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - super().__init__(**kwargs) - self.blob_items = blob_items - - -class BlobHierarchyListSegment(_serialization.Model): - """BlobHierarchyListSegment. - - All required parameters must be populated in order to send to server. - - :ivar blob_prefixes: - :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] - :ivar blob_items: Required. - :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - - _validation = { - "blob_items": {"required": True}, - } - - _attribute_map = { - "blob_prefixes": {"key": "BlobPrefixes", "type": "[BlobPrefix]", "xml": {"name": "BlobPrefix"}}, - "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"name": "Blob", "itemsName": "Blob"}}, - } - _xml_map = {"name": "Blobs"} - - def __init__( - self, - *, - blob_items: List["_models.BlobItemInternal"], - blob_prefixes: Optional[List["_models.BlobPrefix"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword blob_prefixes: - :paramtype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] - :keyword blob_items: Required. - :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - super().__init__(**kwargs) - self.blob_prefixes = blob_prefixes - self.blob_items = blob_items - - -class BlobHTTPHeaders(_serialization.Model): - """Parameter group. - - :ivar blob_cache_control: Optional. Sets the blob's cache control. If specified, this property - is stored with the blob and returned with a read request. - :vartype blob_cache_control: str - :ivar blob_content_type: Optional. Sets the blob's content type. 
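These headers correspond to the public ContentSettings helper, which is the supported way to populate this parameter group at upload time. A sketch (connection string and names are placeholders):

from azure.storage.blob import BlobClient, ContentSettings

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="my-container", blob_name="notes.txt"
)  # placeholder names

# ContentSettings feeds the BlobHTTPHeaders parameter group described above.
blob.upload_blob(
    b"hello",
    overwrite=True,
    content_settings=ContentSettings(content_type="text/plain", cache_control="max-age=3600"),
)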
If specified, this property is - stored with the blob and returned with a read request. - :vartype blob_content_type: str - :ivar blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not - validated, as the hashes for the individual blocks were validated when each was uploaded. - :vartype blob_content_md5: bytes - :ivar blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this - property is stored with the blob and returned with a read request. - :vartype blob_content_encoding: str - :ivar blob_content_language: Optional. Set the blob's content language. If specified, this - property is stored with the blob and returned with a read request. - :vartype blob_content_language: str - :ivar blob_content_disposition: Optional. Sets the blob's Content-Disposition header. - :vartype blob_content_disposition: str - """ - - _attribute_map = { - "blob_cache_control": {"key": "blobCacheControl", "type": "str"}, - "blob_content_type": {"key": "blobContentType", "type": "str"}, - "blob_content_md5": {"key": "blobContentMD5", "type": "bytearray"}, - "blob_content_encoding": {"key": "blobContentEncoding", "type": "str"}, - "blob_content_language": {"key": "blobContentLanguage", "type": "str"}, - "blob_content_disposition": {"key": "blobContentDisposition", "type": "str"}, - } - - def __init__( - self, - *, - blob_cache_control: Optional[str] = None, - blob_content_type: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this - property is stored with the blob and returned with a read request. - :paramtype blob_cache_control: str - :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property - is stored with the blob and returned with a read request. - :paramtype blob_content_type: str - :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is - not validated, as the hashes for the individual blocks were validated when each was uploaded. - :paramtype blob_content_md5: bytes - :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this - property is stored with the blob and returned with a read request. - :paramtype blob_content_encoding: str - :keyword blob_content_language: Optional. Set the blob's content language. If specified, this - property is stored with the blob and returned with a read request. - :paramtype blob_content_language: str - :keyword blob_content_disposition: Optional. Sets the blob's Content-Disposition header. - :paramtype blob_content_disposition: str - """ - super().__init__(**kwargs) - self.blob_cache_control = blob_cache_control - self.blob_content_type = blob_content_type - self.blob_content_md5 = blob_content_md5 - self.blob_content_encoding = blob_content_encoding - self.blob_content_language = blob_content_language - self.blob_content_disposition = blob_content_disposition - - -class BlobItemInternal(_serialization.Model): - """An Azure Storage blob. - - All required parameters must be populated in order to send to server. - - :ivar name: Required. - :vartype name: ~azure.storage.blob.models.BlobName - :ivar deleted: Required. - :vartype deleted: bool - :ivar snapshot: Required. 
- :vartype snapshot: str
- :ivar version_id:
- :vartype version_id: str
- :ivar is_current_version:
- :vartype is_current_version: bool
- :ivar properties: Properties of a blob. Required.
- :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal
- :ivar metadata:
- :vartype metadata: ~azure.storage.blob.models.BlobMetadata
- :ivar blob_tags: Blob tags.
- :vartype blob_tags: ~azure.storage.blob.models.BlobTags
- :ivar has_versions_only:
- :vartype has_versions_only: bool
- :ivar object_replication_metadata: Dictionary of :code:`<string>`.
- :vartype object_replication_metadata: dict[str, str]
- """
-
- _validation = {
- "name": {"required": True},
- "deleted": {"required": True},
- "snapshot": {"required": True},
- "properties": {"required": True},
- }
-
- _attribute_map = {
- "name": {"key": "Name", "type": "BlobName"},
- "deleted": {"key": "Deleted", "type": "bool"},
- "snapshot": {"key": "Snapshot", "type": "str"},
- "version_id": {"key": "VersionId", "type": "str"},
- "is_current_version": {"key": "IsCurrentVersion", "type": "bool"},
- "properties": {"key": "Properties", "type": "BlobPropertiesInternal"},
- "metadata": {"key": "Metadata", "type": "BlobMetadata"},
- "blob_tags": {"key": "BlobTags", "type": "BlobTags"},
- "has_versions_only": {"key": "HasVersionsOnly", "type": "bool"},
- "object_replication_metadata": {"key": "OrMetadata", "type": "{str}"},
- }
- _xml_map = {"name": "Blob"}
-
- def __init__(
- self,
- *,
- name: "_models.BlobName",
- deleted: bool,
- snapshot: str,
- properties: "_models.BlobPropertiesInternal",
- version_id: Optional[str] = None,
- is_current_version: Optional[bool] = None,
- metadata: Optional["_models.BlobMetadata"] = None,
- blob_tags: Optional["_models.BlobTags"] = None,
- has_versions_only: Optional[bool] = None,
- object_replication_metadata: Optional[Dict[str, str]] = None,
- **kwargs: Any
- ) -> None:
- """
- :keyword name: Required.
- :paramtype name: ~azure.storage.blob.models.BlobName
- :keyword deleted: Required.
- :paramtype deleted: bool
- :keyword snapshot: Required.
- :paramtype snapshot: str
- :keyword version_id:
- :paramtype version_id: str
- :keyword is_current_version:
- :paramtype is_current_version: bool
- :keyword properties: Properties of a blob. Required.
- :paramtype properties: ~azure.storage.blob.models.BlobPropertiesInternal
- :keyword metadata:
- :paramtype metadata: ~azure.storage.blob.models.BlobMetadata
- :keyword blob_tags: Blob tags.
- :paramtype blob_tags: ~azure.storage.blob.models.BlobTags
- :keyword has_versions_only:
- :paramtype has_versions_only: bool
- :keyword object_replication_metadata: Dictionary of :code:`<string>`.
- :paramtype object_replication_metadata: dict[str, str]
- """
- super().__init__(**kwargs)
- self.name = name
- self.deleted = deleted
- self.snapshot = snapshot
- self.version_id = version_id
- self.is_current_version = is_current_version
- self.properties = properties
- self.metadata = metadata
- self.blob_tags = blob_tags
- self.has_versions_only = has_versions_only
- self.object_replication_metadata = object_replication_metadata
-
-
-class BlobMetadata(_serialization.Model):
- """BlobMetadata.
-
- :ivar additional_properties: Unmatched properties from the message are deserialized to this
- collection.
- :vartype additional_properties: dict[str, str] - :ivar encrypted: - :vartype encrypted: str - """ - - _attribute_map = { - "additional_properties": {"key": "", "type": "{str}"}, - "encrypted": {"key": "Encrypted", "type": "str", "xml": {"attr": True}}, - } - _xml_map = {"name": "Metadata"} - - def __init__( - self, *, additional_properties: Optional[Dict[str, str]] = None, encrypted: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, str] - :keyword encrypted: - :paramtype encrypted: str - """ - super().__init__(**kwargs) - self.additional_properties = additional_properties - self.encrypted = encrypted - - -class BlobName(_serialization.Model): - """BlobName. - - :ivar encoded: Indicates if the blob name is encoded. - :vartype encoded: bool - :ivar content: The name of the blob. - :vartype content: str - """ - - _attribute_map = { - "encoded": {"key": "Encoded", "type": "bool", "xml": {"name": "Encoded", "attr": True}}, - "content": {"key": "content", "type": "str", "xml": {"text": True}}, - } - - def __init__(self, *, encoded: Optional[bool] = None, content: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword encoded: Indicates if the blob name is encoded. - :paramtype encoded: bool - :keyword content: The name of the blob. - :paramtype content: str - """ - super().__init__(**kwargs) - self.encoded = encoded - self.content = content - - -class BlobPrefix(_serialization.Model): - """BlobPrefix. - - All required parameters must be populated in order to send to server. - - :ivar name: Required. - :vartype name: ~azure.storage.blob.models.BlobName - """ - - _validation = { - "name": {"required": True}, - } - - _attribute_map = { - "name": {"key": "Name", "type": "BlobName"}, - } - - def __init__(self, *, name: "_models.BlobName", **kwargs: Any) -> None: - """ - :keyword name: Required. - :paramtype name: ~azure.storage.blob.models.BlobName - """ - super().__init__(**kwargs) - self.name = name - - -class BlobPropertiesInternal(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Properties of a blob. - - All required parameters must be populated in order to send to server. - - :ivar creation_time: - :vartype creation_time: ~datetime.datetime - :ivar last_modified: Required. - :vartype last_modified: ~datetime.datetime - :ivar etag: Required. - :vartype etag: str - :ivar content_length: Size in bytes. - :vartype content_length: int - :ivar content_type: - :vartype content_type: str - :ivar content_encoding: - :vartype content_encoding: str - :ivar content_language: - :vartype content_language: str - :ivar content_md5: - :vartype content_md5: bytes - :ivar content_disposition: - :vartype content_disposition: str - :ivar cache_control: - :vartype cache_control: str - :ivar blob_sequence_number: - :vartype blob_sequence_number: int - :ivar blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". - :vartype blob_type: str or ~azure.storage.blob.models.BlobType - :ivar lease_status: Known values are: "locked" and "unlocked". - :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and - "broken". - :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType - :ivar lease_duration: Known values are: "infinite" and "fixed". 
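An aside on BlobName above: when the Encoded attribute is true, the element text is percent-encoded, so consumers typically decode it before display. A small sketch under that assumption (import path illustrative):

from urllib.parse import unquote

# Illustrative import path for the generated model.
from azure.storage.blob._generated.models import BlobName

def display_name(name: BlobName) -> str:
    # When Encoded="true" in the listing XML, the text content is percent-encoded.
    if name.encoded and name.content:
        return unquote(name.content)
    return name.content or ""

print(display_name(BlobName(encoded=True, content="folder%2Ffile%20name.txt")))
# -> folder/file name.txt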
- :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :ivar copy_id: - :vartype copy_id: str - :ivar copy_status: Known values are: "pending", "success", "aborted", and "failed". - :vartype copy_status: str or ~azure.storage.blob.models.CopyStatusType - :ivar copy_source: - :vartype copy_source: str - :ivar copy_progress: - :vartype copy_progress: str - :ivar copy_completion_time: - :vartype copy_completion_time: ~datetime.datetime - :ivar copy_status_description: - :vartype copy_status_description: str - :ivar server_encrypted: - :vartype server_encrypted: bool - :ivar incremental_copy: - :vartype incremental_copy: bool - :ivar destination_snapshot: - :vartype destination_snapshot: str - :ivar deleted_time: - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: - :vartype remaining_retention_days: int - :ivar access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", - "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". - :vartype access_tier: str or ~azure.storage.blob.models.AccessTier - :ivar access_tier_inferred: - :vartype access_tier_inferred: bool - :ivar archive_status: Known values are: "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". - :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus - :ivar customer_provided_key_sha256: - :vartype customer_provided_key_sha256: str - :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. - :vartype encryption_scope: str - :ivar access_tier_change_time: - :vartype access_tier_change_time: ~datetime.datetime - :ivar tag_count: - :vartype tag_count: int - :ivar expires_on: - :vartype expires_on: ~datetime.datetime - :ivar is_sealed: - :vartype is_sealed: bool - :ivar rehydrate_priority: If an object is in rehydrate pending state then this header is - returned with priority of rehydrate. Valid values are High and Standard. Known values are: - "High" and "Standard". - :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :ivar last_accessed_on: - :vartype last_accessed_on: ~datetime.datetime - :ivar immutability_policy_expires_on: - :vartype immutability_policy_expires_on: ~datetime.datetime - :ivar immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". 
- :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :ivar legal_hold: - :vartype legal_hold: bool - """ - - _validation = { - "last_modified": {"required": True}, - "etag": {"required": True}, - } - - _attribute_map = { - "creation_time": {"key": "Creation-Time", "type": "rfc-1123"}, - "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, - "etag": {"key": "Etag", "type": "str"}, - "content_length": {"key": "Content-Length", "type": "int"}, - "content_type": {"key": "Content-Type", "type": "str"}, - "content_encoding": {"key": "Content-Encoding", "type": "str"}, - "content_language": {"key": "Content-Language", "type": "str"}, - "content_md5": {"key": "Content-MD5", "type": "bytearray"}, - "content_disposition": {"key": "Content-Disposition", "type": "str"}, - "cache_control": {"key": "Cache-Control", "type": "str"}, - "blob_sequence_number": {"key": "x-ms-blob-sequence-number", "type": "int"}, - "blob_type": {"key": "BlobType", "type": "str"}, - "lease_status": {"key": "LeaseStatus", "type": "str"}, - "lease_state": {"key": "LeaseState", "type": "str"}, - "lease_duration": {"key": "LeaseDuration", "type": "str"}, - "copy_id": {"key": "CopyId", "type": "str"}, - "copy_status": {"key": "CopyStatus", "type": "str"}, - "copy_source": {"key": "CopySource", "type": "str"}, - "copy_progress": {"key": "CopyProgress", "type": "str"}, - "copy_completion_time": {"key": "CopyCompletionTime", "type": "rfc-1123"}, - "copy_status_description": {"key": "CopyStatusDescription", "type": "str"}, - "server_encrypted": {"key": "ServerEncrypted", "type": "bool"}, - "incremental_copy": {"key": "IncrementalCopy", "type": "bool"}, - "destination_snapshot": {"key": "DestinationSnapshot", "type": "str"}, - "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, - "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, - "access_tier": {"key": "AccessTier", "type": "str"}, - "access_tier_inferred": {"key": "AccessTierInferred", "type": "bool"}, - "archive_status": {"key": "ArchiveStatus", "type": "str"}, - "customer_provided_key_sha256": {"key": "CustomerProvidedKeySha256", "type": "str"}, - "encryption_scope": {"key": "EncryptionScope", "type": "str"}, - "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"}, - "tag_count": {"key": "TagCount", "type": "int"}, - "expires_on": {"key": "Expiry-Time", "type": "rfc-1123"}, - "is_sealed": {"key": "Sealed", "type": "bool"}, - "rehydrate_priority": {"key": "RehydratePriority", "type": "str"}, - "last_accessed_on": {"key": "LastAccessTime", "type": "rfc-1123"}, - "immutability_policy_expires_on": {"key": "ImmutabilityPolicyUntilDate", "type": "rfc-1123"}, - "immutability_policy_mode": {"key": "ImmutabilityPolicyMode", "type": "str"}, - "legal_hold": {"key": "LegalHold", "type": "bool"}, - } - _xml_map = {"name": "Properties"} - - def __init__( # pylint: disable=too-many-locals - self, - *, - last_modified: datetime.datetime, - etag: str, - creation_time: Optional[datetime.datetime] = None, - content_length: Optional[int] = None, - content_type: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_md5: Optional[bytes] = None, - content_disposition: Optional[str] = None, - cache_control: Optional[str] = None, - blob_sequence_number: Optional[int] = None, - blob_type: Optional[Union[str, "_models.BlobType"]] = None, - lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, - lease_state: 
Optional[Union[str, "_models.LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, - copy_id: Optional[str] = None, - copy_status: Optional[Union[str, "_models.CopyStatusType"]] = None, - copy_source: Optional[str] = None, - copy_progress: Optional[str] = None, - copy_completion_time: Optional[datetime.datetime] = None, - copy_status_description: Optional[str] = None, - server_encrypted: Optional[bool] = None, - incremental_copy: Optional[bool] = None, - destination_snapshot: Optional[str] = None, - deleted_time: Optional[datetime.datetime] = None, - remaining_retention_days: Optional[int] = None, - access_tier: Optional[Union[str, "_models.AccessTier"]] = None, - access_tier_inferred: Optional[bool] = None, - archive_status: Optional[Union[str, "_models.ArchiveStatus"]] = None, - customer_provided_key_sha256: Optional[str] = None, - encryption_scope: Optional[str] = None, - access_tier_change_time: Optional[datetime.datetime] = None, - tag_count: Optional[int] = None, - expires_on: Optional[datetime.datetime] = None, - is_sealed: Optional[bool] = None, - rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, - last_accessed_on: Optional[datetime.datetime] = None, - immutability_policy_expires_on: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, - legal_hold: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword creation_time: - :paramtype creation_time: ~datetime.datetime - :keyword last_modified: Required. - :paramtype last_modified: ~datetime.datetime - :keyword etag: Required. - :paramtype etag: str - :keyword content_length: Size in bytes. - :paramtype content_length: int - :keyword content_type: - :paramtype content_type: str - :keyword content_encoding: - :paramtype content_encoding: str - :keyword content_language: - :paramtype content_language: str - :keyword content_md5: - :paramtype content_md5: bytes - :keyword content_disposition: - :paramtype content_disposition: str - :keyword cache_control: - :paramtype cache_control: str - :keyword blob_sequence_number: - :paramtype blob_sequence_number: int - :keyword blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". - :paramtype blob_type: str or ~azure.storage.blob.models.BlobType - :keyword lease_status: Known values are: "locked" and "unlocked". - :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and - "broken". - :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType - :keyword lease_duration: Known values are: "infinite" and "fixed". - :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :keyword copy_id: - :paramtype copy_id: str - :keyword copy_status: Known values are: "pending", "success", "aborted", and "failed". 
- :paramtype copy_status: str or ~azure.storage.blob.models.CopyStatusType - :keyword copy_source: - :paramtype copy_source: str - :keyword copy_progress: - :paramtype copy_progress: str - :keyword copy_completion_time: - :paramtype copy_completion_time: ~datetime.datetime - :keyword copy_status_description: - :paramtype copy_status_description: str - :keyword server_encrypted: - :paramtype server_encrypted: bool - :keyword incremental_copy: - :paramtype incremental_copy: bool - :keyword destination_snapshot: - :paramtype destination_snapshot: str - :keyword deleted_time: - :paramtype deleted_time: ~datetime.datetime - :keyword remaining_retention_days: - :paramtype remaining_retention_days: int - :keyword access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", - "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". - :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier - :keyword access_tier_inferred: - :paramtype access_tier_inferred: bool - :keyword archive_status: Known values are: "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". - :paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus - :keyword customer_provided_key_sha256: - :paramtype customer_provided_key_sha256: str - :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. - :paramtype encryption_scope: str - :keyword access_tier_change_time: - :paramtype access_tier_change_time: ~datetime.datetime - :keyword tag_count: - :paramtype tag_count: int - :keyword expires_on: - :paramtype expires_on: ~datetime.datetime - :keyword is_sealed: - :paramtype is_sealed: bool - :keyword rehydrate_priority: If an object is in rehydrate pending state then this header is - returned with priority of rehydrate. Valid values are High and Standard. Known values are: - "High" and "Standard". - :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :keyword last_accessed_on: - :paramtype last_accessed_on: ~datetime.datetime - :keyword immutability_policy_expires_on: - :paramtype immutability_policy_expires_on: ~datetime.datetime - :keyword immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". 
- :paramtype immutability_policy_mode: str or - ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :keyword legal_hold: - :paramtype legal_hold: bool - """ - super().__init__(**kwargs) - self.creation_time = creation_time - self.last_modified = last_modified - self.etag = etag - self.content_length = content_length - self.content_type = content_type - self.content_encoding = content_encoding - self.content_language = content_language - self.content_md5 = content_md5 - self.content_disposition = content_disposition - self.cache_control = cache_control - self.blob_sequence_number = blob_sequence_number - self.blob_type = blob_type - self.lease_status = lease_status - self.lease_state = lease_state - self.lease_duration = lease_duration - self.copy_id = copy_id - self.copy_status = copy_status - self.copy_source = copy_source - self.copy_progress = copy_progress - self.copy_completion_time = copy_completion_time - self.copy_status_description = copy_status_description - self.server_encrypted = server_encrypted - self.incremental_copy = incremental_copy - self.destination_snapshot = destination_snapshot - self.deleted_time = deleted_time - self.remaining_retention_days = remaining_retention_days - self.access_tier = access_tier - self.access_tier_inferred = access_tier_inferred - self.archive_status = archive_status - self.customer_provided_key_sha256 = customer_provided_key_sha256 - self.encryption_scope = encryption_scope - self.access_tier_change_time = access_tier_change_time - self.tag_count = tag_count - self.expires_on = expires_on - self.is_sealed = is_sealed - self.rehydrate_priority = rehydrate_priority - self.last_accessed_on = last_accessed_on - self.immutability_policy_expires_on = immutability_policy_expires_on - self.immutability_policy_mode = immutability_policy_mode - self.legal_hold = legal_hold - - -class BlobTag(_serialization.Model): - """BlobTag. - - All required parameters must be populated in order to send to server. - - :ivar key: Required. - :vartype key: str - :ivar value: Required. - :vartype value: str - """ - - _validation = { - "key": {"required": True}, - "value": {"required": True}, - } - - _attribute_map = { - "key": {"key": "Key", "type": "str"}, - "value": {"key": "Value", "type": "str"}, - } - _xml_map = {"name": "Tag"} - - def __init__(self, *, key: str, value: str, **kwargs: Any) -> None: - """ - :keyword key: Required. - :paramtype key: str - :keyword value: Required. - :paramtype value: str - """ - super().__init__(**kwargs) - self.key = key - self.value = value - - -class BlobTags(_serialization.Model): - """Blob tags. - - All required parameters must be populated in order to send to server. - - :ivar blob_tag_set: Required. - :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] - """ - - _validation = { - "blob_tag_set": {"required": True}, - } - - _attribute_map = { - "blob_tag_set": { - "key": "BlobTagSet", - "type": "[BlobTag]", - "xml": {"name": "TagSet", "wrapped": True, "itemsName": "Tag"}, - }, - } - _xml_map = {"name": "Tags"} - - def __init__(self, *, blob_tag_set: List["_models.BlobTag"], **kwargs: Any) -> None: - """ - :keyword blob_tag_set: Required. - :paramtype blob_tag_set: list[~azure.storage.blob.models.BlobTag] - """ - super().__init__(**kwargs) - self.blob_tag_set = blob_tag_set - - -class Block(_serialization.Model): - """Represents a single block in a block blob. It describes the block's ID and size. - - All required parameters must be populated in order to send to server. 
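An aside on BlobTag and BlobTags above: building a tags payload is just nested model construction, and the XML maps shown determine the wrapped TagSet shape on the wire. A minimal sketch (import path illustrative):

# Illustrative import path for the generated models.
from azure.storage.blob._generated.models import BlobTag, BlobTags

tags = BlobTags(
    blob_tag_set=[
        BlobTag(key="env", value="prod"),
        BlobTag(key="tier", value="gold"),
    ]
)
# Per the XML maps above, this serializes as
# <Tags><TagSet><Tag><Key>env</Key><Value>prod</Value></Tag>...</TagSet></Tags>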
- - :ivar name: The base64 encoded block ID. Required. - :vartype name: str - :ivar size: The block size in bytes. Required. - :vartype size: int - """ - - _validation = { - "name": {"required": True}, - "size": {"required": True}, - } - - _attribute_map = { - "name": {"key": "Name", "type": "str"}, - "size": {"key": "Size", "type": "int"}, - } - - def __init__(self, *, name: str, size: int, **kwargs: Any) -> None: - """ - :keyword name: The base64 encoded block ID. Required. - :paramtype name: str - :keyword size: The block size in bytes. Required. - :paramtype size: int - """ - super().__init__(**kwargs) - self.name = name - self.size = size - - -class BlockList(_serialization.Model): - """BlockList. - - :ivar committed_blocks: - :vartype committed_blocks: list[~azure.storage.blob.models.Block] - :ivar uncommitted_blocks: - :vartype uncommitted_blocks: list[~azure.storage.blob.models.Block] - """ - - _attribute_map = { - "committed_blocks": {"key": "CommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, - "uncommitted_blocks": {"key": "UncommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, - } - - def __init__( - self, - *, - committed_blocks: Optional[List["_models.Block"]] = None, - uncommitted_blocks: Optional[List["_models.Block"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword committed_blocks: - :paramtype committed_blocks: list[~azure.storage.blob.models.Block] - :keyword uncommitted_blocks: - :paramtype uncommitted_blocks: list[~azure.storage.blob.models.Block] - """ - super().__init__(**kwargs) - self.committed_blocks = committed_blocks - self.uncommitted_blocks = uncommitted_blocks - - -class BlockLookupList(_serialization.Model): - """BlockLookupList. - - :ivar committed: - :vartype committed: list[str] - :ivar uncommitted: - :vartype uncommitted: list[str] - :ivar latest: - :vartype latest: list[str] - """ - - _attribute_map = { - "committed": {"key": "Committed", "type": "[str]", "xml": {"itemsName": "Committed"}}, - "uncommitted": {"key": "Uncommitted", "type": "[str]", "xml": {"itemsName": "Uncommitted"}}, - "latest": {"key": "Latest", "type": "[str]", "xml": {"itemsName": "Latest"}}, - } - _xml_map = {"name": "BlockList"} - - def __init__( - self, - *, - committed: Optional[List[str]] = None, - uncommitted: Optional[List[str]] = None, - latest: Optional[List[str]] = None, - **kwargs: Any - ) -> None: - """ - :keyword committed: - :paramtype committed: list[str] - :keyword uncommitted: - :paramtype uncommitted: list[str] - :keyword latest: - :paramtype latest: list[str] - """ - super().__init__(**kwargs) - self.committed = committed - self.uncommitted = uncommitted - self.latest = latest - - -class ClearRange(_serialization.Model): - """ClearRange. - - All required parameters must be populated in order to send to server. - - :ivar start: Required. - :vartype start: int - :ivar end: Required. - :vartype end: int - """ - - _validation = { - "start": {"required": True}, - "end": {"required": True}, - } - - _attribute_map = { - "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, - "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, - } - _xml_map = {"name": "ClearRange"} - - def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: - """ - :keyword start: Required. - :paramtype start: int - :keyword end: Required. - :paramtype end: int - """ - super().__init__(**kwargs) - self.start = start - self.end = end - - -class ContainerCpkScopeInfo(_serialization.Model): - """Parameter group. 
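An aside on BlockLookupList above: a commit payload separates committed, uncommitted, and latest block IDs, all base64 strings. A sketch of generating them, assuming the service's usual requirement that encoded block IDs within one blob share the same length:

import base64

# Illustrative import path for the generated model.
from azure.storage.blob._generated.models import BlockLookupList

def block_id(i: int) -> str:
    # Fixed-width pre-encoding text keeps every encoded block ID the same length.
    return base64.b64encode(f"block-{i:06d}".encode()).decode()

lookup = BlockLookupList(latest=[block_id(i) for i in range(3)])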
-
- :ivar default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the default
- encryption scope to set on the container and use for all future writes.
- :vartype default_encryption_scope: str
- :ivar prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true,
- prevents any request from specifying a different encryption scope than the scope set on the
- container.
- :vartype prevent_encryption_scope_override: bool
- """
-
- _attribute_map = {
- "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"},
- "prevent_encryption_scope_override": {"key": "PreventEncryptionScopeOverride", "type": "bool"},
- }
-
- def __init__(
- self,
- *,
- default_encryption_scope: Optional[str] = None,
- prevent_encryption_scope_override: Optional[bool] = None,
- **kwargs: Any
- ) -> None:
- """
- :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the
- default encryption scope to set on the container and use for all future writes.
- :paramtype default_encryption_scope: str
- :keyword prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true,
- prevents any request from specifying a different encryption scope than the scope set on the
- container.
- :paramtype prevent_encryption_scope_override: bool
- """
- super().__init__(**kwargs)
- self.default_encryption_scope = default_encryption_scope
- self.prevent_encryption_scope_override = prevent_encryption_scope_override
-
-
-class ContainerItem(_serialization.Model):
- """An Azure Storage container.
-
- All required parameters must be populated in order to send to server.
-
- :ivar name: Required.
- :vartype name: str
- :ivar deleted:
- :vartype deleted: bool
- :ivar version:
- :vartype version: str
- :ivar properties: Properties of a container. Required.
- :vartype properties: ~azure.storage.blob.models.ContainerProperties
- :ivar metadata: Dictionary of :code:`<string>`.
- :vartype metadata: dict[str, str]
- """
-
- _validation = {
- "name": {"required": True},
- "properties": {"required": True},
- }
-
- _attribute_map = {
- "name": {"key": "Name", "type": "str"},
- "deleted": {"key": "Deleted", "type": "bool"},
- "version": {"key": "Version", "type": "str"},
- "properties": {"key": "Properties", "type": "ContainerProperties"},
- "metadata": {"key": "Metadata", "type": "{str}"},
- }
- _xml_map = {"name": "Container"}
-
- def __init__(
- self,
- *,
- name: str,
- properties: "_models.ContainerProperties",
- deleted: Optional[bool] = None,
- version: Optional[str] = None,
- metadata: Optional[Dict[str, str]] = None,
- **kwargs: Any
- ) -> None:
- """
- :keyword name: Required.
- :paramtype name: str
- :keyword deleted:
- :paramtype deleted: bool
- :keyword version:
- :paramtype version: str
- :keyword properties: Properties of a container. Required.
- :paramtype properties: ~azure.storage.blob.models.ContainerProperties
- :keyword metadata: Dictionary of :code:`<string>`.
- :paramtype metadata: dict[str, str]
- """
- super().__init__(**kwargs)
- self.name = name
- self.deleted = deleted
- self.version = version
- self.properties = properties
- self.metadata = metadata
-
-
-class ContainerProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes
- """Properties of a container.
-
- All required parameters must be populated in order to send to server.
-
- :ivar last_modified: Required.
- :vartype last_modified: ~datetime.datetime
- :ivar etag: Required.
- :vartype etag: str
- :ivar lease_status: Known values are: "locked" and "unlocked".
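An aside on ContainerCpkScopeInfo above: its two fields pin a container to a default encryption scope and optionally lock out per-request overrides. A minimal sketch (import path illustrative):

# Illustrative import path for the generated model.
from azure.storage.blob._generated.models import ContainerCpkScopeInfo

scope_info = ContainerCpkScopeInfo(
    default_encryption_scope="scope1",
    # With the override lock set, writes naming a different scope are rejected.
    prevent_encryption_scope_override=True,
)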
- :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and - "broken". - :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType - :ivar lease_duration: Known values are: "infinite" and "fixed". - :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :ivar public_access: Known values are: "container" and "blob". - :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType - :ivar has_immutability_policy: - :vartype has_immutability_policy: bool - :ivar has_legal_hold: - :vartype has_legal_hold: bool - :ivar default_encryption_scope: - :vartype default_encryption_scope: str - :ivar prevent_encryption_scope_override: - :vartype prevent_encryption_scope_override: bool - :ivar deleted_time: - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: - :vartype remaining_retention_days: int - :ivar is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled - on this container. - :vartype is_immutable_storage_with_versioning_enabled: bool - """ - - _validation = { - "last_modified": {"required": True}, - "etag": {"required": True}, - } - - _attribute_map = { - "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, - "etag": {"key": "Etag", "type": "str"}, - "lease_status": {"key": "LeaseStatus", "type": "str"}, - "lease_state": {"key": "LeaseState", "type": "str"}, - "lease_duration": {"key": "LeaseDuration", "type": "str"}, - "public_access": {"key": "PublicAccess", "type": "str"}, - "has_immutability_policy": {"key": "HasImmutabilityPolicy", "type": "bool"}, - "has_legal_hold": {"key": "HasLegalHold", "type": "bool"}, - "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"}, - "prevent_encryption_scope_override": {"key": "DenyEncryptionScopeOverride", "type": "bool"}, - "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, - "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, - "is_immutable_storage_with_versioning_enabled": { - "key": "ImmutableStorageWithVersioningEnabled", - "type": "bool", - }, - } - - def __init__( - self, - *, - last_modified: datetime.datetime, - etag: str, - lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, - lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, - public_access: Optional[Union[str, "_models.PublicAccessType"]] = None, - has_immutability_policy: Optional[bool] = None, - has_legal_hold: Optional[bool] = None, - default_encryption_scope: Optional[str] = None, - prevent_encryption_scope_override: Optional[bool] = None, - deleted_time: Optional[datetime.datetime] = None, - remaining_retention_days: Optional[int] = None, - is_immutable_storage_with_versioning_enabled: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword last_modified: Required. - :paramtype last_modified: ~datetime.datetime - :keyword etag: Required. - :paramtype etag: str - :keyword lease_status: Known values are: "locked" and "unlocked". - :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and - "broken". - :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType - :keyword lease_duration: Known values are: "infinite" and "fixed". 
- :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
- :keyword public_access: Known values are: "container" and "blob".
- :paramtype public_access: str or ~azure.storage.blob.models.PublicAccessType
- :keyword has_immutability_policy:
- :paramtype has_immutability_policy: bool
- :keyword has_legal_hold:
- :paramtype has_legal_hold: bool
- :keyword default_encryption_scope:
- :paramtype default_encryption_scope: str
- :keyword prevent_encryption_scope_override:
- :paramtype prevent_encryption_scope_override: bool
- :keyword deleted_time:
- :paramtype deleted_time: ~datetime.datetime
- :keyword remaining_retention_days:
- :paramtype remaining_retention_days: int
- :keyword is_immutable_storage_with_versioning_enabled: Indicates if version level worm is
- enabled on this container.
- :paramtype is_immutable_storage_with_versioning_enabled: bool
- """
- super().__init__(**kwargs)
- self.last_modified = last_modified
- self.etag = etag
- self.lease_status = lease_status
- self.lease_state = lease_state
- self.lease_duration = lease_duration
- self.public_access = public_access
- self.has_immutability_policy = has_immutability_policy
- self.has_legal_hold = has_legal_hold
- self.default_encryption_scope = default_encryption_scope
- self.prevent_encryption_scope_override = prevent_encryption_scope_override
- self.deleted_time = deleted_time
- self.remaining_retention_days = remaining_retention_days
- self.is_immutable_storage_with_versioning_enabled = is_immutable_storage_with_versioning_enabled
-
-
-class CorsRule(_serialization.Model):
- """CORS is an HTTP feature that enables a web application running under one domain to access
- resources in another domain. Web browsers implement a security restriction known as same-origin
- policy that prevents a web page from calling APIs in a different domain; CORS provides a secure
- way to allow one domain (the origin domain) to call APIs in another domain.
-
- All required parameters must be populated in order to send to server.
-
- :ivar allowed_origins: The origin domains that are permitted to make a request against the
- storage service via CORS. The origin domain is the domain from which the request originates.
- Note that the origin must be an exact case-sensitive match with the origin that the user agent
- sends to the service. You can also use the wildcard character '*' to allow all origin domains
- to make requests via CORS. Required.
- :vartype allowed_origins: str
- :ivar allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a
- CORS request (comma separated). Required.
- :vartype allowed_methods: str
- :ivar allowed_headers: The request headers that the origin domain may specify on the CORS
- request. Required.
- :vartype allowed_headers: str
- :ivar exposed_headers: The response headers that may be sent in the response to the CORS
- request and exposed by the browser to the request issuer. Required.
- :vartype exposed_headers: str
- :ivar max_age_in_seconds: The maximum amount of time that a browser should cache the preflight
- OPTIONS request. Required.
- :vartype max_age_in_seconds: int
- """
-
- _validation = {
- "allowed_origins": {"required": True},
- "allowed_methods": {"required": True},
- "allowed_headers": {"required": True},
- "exposed_headers": {"required": True},
- "max_age_in_seconds": {"required": True, "minimum": 0},
- }
-
- _attribute_map = {
- "allowed_origins": {"key": "AllowedOrigins", "type": "str"},
- "allowed_methods": {"key": "AllowedMethods", "type": "str"},
- "allowed_headers": {"key": "AllowedHeaders", "type": "str"},
- "exposed_headers": {"key": "ExposedHeaders", "type": "str"},
- "max_age_in_seconds": {"key": "MaxAgeInSeconds", "type": "int"},
- }
-
- def __init__(
- self,
- *,
- allowed_origins: str,
- allowed_methods: str,
- allowed_headers: str,
- exposed_headers: str,
- max_age_in_seconds: int,
- **kwargs: Any
- ) -> None:
- """
- :keyword allowed_origins: The origin domains that are permitted to make a request against the
- storage service via CORS. The origin domain is the domain from which the request originates.
- Note that the origin must be an exact case-sensitive match with the origin that the user agent
- sends to the service. You can also use the wildcard character '*' to allow all origin domains
- to make requests via CORS. Required.
- :paramtype allowed_origins: str
- :keyword allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a
- CORS request (comma separated). Required.
- :paramtype allowed_methods: str
- :keyword allowed_headers: The request headers that the origin domain may specify on the CORS
- request. Required.
- :paramtype allowed_headers: str
- :keyword exposed_headers: The response headers that may be sent in the response to the CORS
- request and exposed by the browser to the request issuer. Required.
- :paramtype exposed_headers: str
- :keyword max_age_in_seconds: The maximum amount of time that a browser should cache the
- preflight OPTIONS request. Required.
- :paramtype max_age_in_seconds: int
- """
- super().__init__(**kwargs)
- self.allowed_origins = allowed_origins
- self.allowed_methods = allowed_methods
- self.allowed_headers = allowed_headers
- self.exposed_headers = exposed_headers
- self.max_age_in_seconds = max_age_in_seconds
-
-
-class CpkInfo(_serialization.Model):
- """Parameter group.
-
- :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data
- provided in the request. If not specified, encryption is performed with the root account
- encryption key. For more information, see Encryption at Rest for Azure Storage Services.
- :vartype encryption_key: str
- :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided
- if the x-ms-encryption-key header is provided.
- :vartype encryption_key_sha256: str
- :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
- the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
- provided. Known values are: "None" and "AES256".
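An aside on CpkInfo above: a customer-provided key travels base64-encoded alongside the base64 of its SHA-256 hash, which the service uses to validate the key. A sketch of deriving the pair (import path illustrative):

import base64
import hashlib
import os

# Illustrative import path for the generated model.
from azure.storage.blob._generated.models import CpkInfo

raw_key = os.urandom(32)  # 256-bit AES key

cpk = CpkInfo(
    encryption_key=base64.b64encode(raw_key).decode(),
    encryption_key_sha256=base64.b64encode(hashlib.sha256(raw_key).digest()).decode(),
    encryption_algorithm="AES256",
)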
- :vartype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType - """ - - _attribute_map = { - "encryption_key": {"key": "encryptionKey", "type": "str"}, - "encryption_key_sha256": {"key": "encryptionKeySha256", "type": "str"}, - "encryption_algorithm": {"key": "encryptionAlgorithm", "type": "str"}, - } - - def __init__( - self, - *, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, "_models.EncryptionAlgorithmType"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data - provided in the request. If not specified, encryption is performed with the root account - encryption key. For more information, see Encryption at Rest for Azure Storage Services. - :paramtype encryption_key: str - :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be - provided if the x-ms-encryption-key header is provided. - :paramtype encryption_key_sha256: str - :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. - Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key - header is provided. Known values are: "None" and "AES256". - :paramtype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType - """ - super().__init__(**kwargs) - self.encryption_key = encryption_key - self.encryption_key_sha256 = encryption_key_sha256 - self.encryption_algorithm = encryption_algorithm - - -class CpkScopeInfo(_serialization.Model): - """Parameter group. - - :ivar encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the - encryption scope to use to encrypt the data provided in the request. If not specified, - encryption is performed with the default account encryption scope. For more information, see - Encryption at Rest for Azure Storage Services. - :vartype encryption_scope: str - """ - - _attribute_map = { - "encryption_scope": {"key": "encryptionScope", "type": "str"}, - } - - def __init__(self, *, encryption_scope: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the - encryption scope to use to encrypt the data provided in the request. If not specified, - encryption is performed with the default account encryption scope. For more information, see - Encryption at Rest for Azure Storage Services. - :paramtype encryption_scope: str - """ - super().__init__(**kwargs) - self.encryption_scope = encryption_scope - - -class DelimitedTextConfiguration(_serialization.Model): - """Groups the settings used for interpreting the blob data if the blob is delimited text - formatted. - - :ivar column_separator: The string used to separate columns. - :vartype column_separator: str - :ivar field_quote: The string used to quote a specific field. - :vartype field_quote: str - :ivar record_separator: The string used to separate records. - :vartype record_separator: str - :ivar escape_char: The string used as an escape character. - :vartype escape_char: str - :ivar headers_present: Represents whether the data has headers. 
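An aside on DelimitedTextConfiguration above: the whole CSV dialect for a quick query is captured by these five fields; note in the XML map just below that headers_present is emitted as HasHeaders. A minimal sketch (import path illustrative):

# Illustrative import path for the generated model.
from azure.storage.blob._generated.models import DelimitedTextConfiguration

csv_dialect = DelimitedTextConfiguration(
    column_separator=",",
    field_quote='"',
    record_separator="\n",
    escape_char="\\",
    headers_present=True,  # emitted as <HasHeaders>true</HasHeaders>
)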
- :vartype headers_present: bool - """ - - _attribute_map = { - "column_separator": {"key": "ColumnSeparator", "type": "str", "xml": {"name": "ColumnSeparator"}}, - "field_quote": {"key": "FieldQuote", "type": "str", "xml": {"name": "FieldQuote"}}, - "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, - "escape_char": {"key": "EscapeChar", "type": "str", "xml": {"name": "EscapeChar"}}, - "headers_present": {"key": "HeadersPresent", "type": "bool", "xml": {"name": "HasHeaders"}}, - } - _xml_map = {"name": "DelimitedTextConfiguration"} - - def __init__( - self, - *, - column_separator: Optional[str] = None, - field_quote: Optional[str] = None, - record_separator: Optional[str] = None, - escape_char: Optional[str] = None, - headers_present: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword column_separator: The string used to separate columns. - :paramtype column_separator: str - :keyword field_quote: The string used to quote a specific field. - :paramtype field_quote: str - :keyword record_separator: The string used to separate records. - :paramtype record_separator: str - :keyword escape_char: The string used as an escape character. - :paramtype escape_char: str - :keyword headers_present: Represents whether the data has headers. - :paramtype headers_present: bool - """ - super().__init__(**kwargs) - self.column_separator = column_separator - self.field_quote = field_quote - self.record_separator = record_separator - self.escape_char = escape_char - self.headers_present = headers_present - - -class FilterBlobItem(_serialization.Model): - """Blob info from a Filter Blobs API call. - - All required parameters must be populated in order to send to server. - - :ivar name: Required. - :vartype name: str - :ivar container_name: Required. - :vartype container_name: str - :ivar tags: Blob tags. - :vartype tags: ~azure.storage.blob.models.BlobTags - :ivar version_id: - :vartype version_id: str - :ivar is_current_version: - :vartype is_current_version: bool - """ - - _validation = { - "name": {"required": True}, - "container_name": {"required": True}, - } - - _attribute_map = { - "name": {"key": "Name", "type": "str"}, - "container_name": {"key": "ContainerName", "type": "str"}, - "tags": {"key": "Tags", "type": "BlobTags"}, - "version_id": {"key": "VersionId", "type": "str"}, - "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, - } - _xml_map = {"name": "Blob"} - - def __init__( - self, - *, - name: str, - container_name: str, - tags: Optional["_models.BlobTags"] = None, - version_id: Optional[str] = None, - is_current_version: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Required. - :paramtype name: str - :keyword container_name: Required. - :paramtype container_name: str - :keyword tags: Blob tags. - :paramtype tags: ~azure.storage.blob.models.BlobTags - :keyword version_id: - :paramtype version_id: str - :keyword is_current_version: - :paramtype is_current_version: bool - """ - super().__init__(**kwargs) - self.name = name - self.container_name = container_name - self.tags = tags - self.version_id = version_id - self.is_current_version = is_current_version - - -class FilterBlobSegment(_serialization.Model): - """The result of a Filter Blobs API call. - - All required parameters must be populated in order to send to server. - - :ivar service_endpoint: Required. - :vartype service_endpoint: str - :ivar where: Required. - :vartype where: str - :ivar blobs: Required. 
- :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem] - :ivar next_marker: - :vartype next_marker: str - """ - - _validation = { - "service_endpoint": {"required": True}, - "where": {"required": True}, - "blobs": {"required": True}, - } - - _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, - "where": {"key": "Where", "type": "str"}, - "blobs": { - "key": "Blobs", - "type": "[FilterBlobItem]", - "xml": {"name": "Blobs", "wrapped": True, "itemsName": "Blob"}, - }, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - _xml_map = {"name": "EnumerationResults"} - - def __init__( - self, - *, - service_endpoint: str, - where: str, - blobs: List["_models.FilterBlobItem"], - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword service_endpoint: Required. - :paramtype service_endpoint: str - :keyword where: Required. - :paramtype where: str - :keyword blobs: Required. - :paramtype blobs: list[~azure.storage.blob.models.FilterBlobItem] - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.service_endpoint = service_endpoint - self.where = where - self.blobs = blobs - self.next_marker = next_marker - - -class GeoReplication(_serialization.Model): - """Geo-Replication information for the Secondary Storage Service. - - All required parameters must be populated in order to send to server. - - :ivar status: The status of the secondary location. Required. Known values are: "live", - "bootstrap", and "unavailable". - :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatusType - :ivar last_sync_time: A GMT date/time value, to the second. All primary writes preceding this - value are guaranteed to be available for read operations at the secondary. Primary writes after - this point in time may or may not be available for reads. Required. - :vartype last_sync_time: ~datetime.datetime - """ - - _validation = { - "status": {"required": True}, - "last_sync_time": {"required": True}, - } - - _attribute_map = { - "status": {"key": "Status", "type": "str"}, - "last_sync_time": {"key": "LastSyncTime", "type": "rfc-1123"}, - } - - def __init__( - self, - *, - status: Union[str, "_models.GeoReplicationStatusType"], - last_sync_time: datetime.datetime, - **kwargs: Any - ) -> None: - """ - :keyword status: The status of the secondary location. Required. Known values are: "live", - "bootstrap", and "unavailable". - :paramtype status: str or ~azure.storage.blob.models.GeoReplicationStatusType - :keyword last_sync_time: A GMT date/time value, to the second. All primary writes preceding - this value are guaranteed to be available for read operations at the secondary. Primary writes - after this point in time may or may not be available for reads. Required. - :paramtype last_sync_time: ~datetime.datetime - """ - super().__init__(**kwargs) - self.status = status - self.last_sync_time = last_sync_time - - -class JsonTextConfiguration(_serialization.Model): - """json text configuration. - - :ivar record_separator: The string used to separate records. - :vartype record_separator: str - """ - - _attribute_map = { - "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, - } - _xml_map = {"name": "JsonTextConfiguration"} - - def __init__(self, *, record_separator: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword record_separator: The string used to separate records. 
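An aside on GeoReplication above: last_sync_time bounds the data-loss window on failover, since only writes preceding it are guaranteed readable at the secondary. A sketch of turning that into a lag measurement:

import datetime

def secondary_lag(geo) -> datetime.timedelta:
    # geo: a GeoReplication instance; last_sync_time deserializes from RFC 1123
    # into an aware UTC datetime, so subtract against an aware "now".
    return datetime.datetime.now(datetime.timezone.utc) - geo.last_sync_time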
- :paramtype record_separator: str - """ - super().__init__(**kwargs) - self.record_separator = record_separator - - -class KeyInfo(_serialization.Model): - """Key information. - - All required parameters must be populated in order to send to server. - - :ivar start: The date-time the key is active in ISO 8601 UTC time. Required. - :vartype start: str - :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. - :vartype expiry: str - """ - - _validation = { - "start": {"required": True}, - "expiry": {"required": True}, - } - - _attribute_map = { - "start": {"key": "Start", "type": "str"}, - "expiry": {"key": "Expiry", "type": "str"}, - } - - def __init__(self, *, start: str, expiry: str, **kwargs: Any) -> None: - """ - :keyword start: The date-time the key is active in ISO 8601 UTC time. Required. - :paramtype start: str - :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. - :paramtype expiry: str - """ - super().__init__(**kwargs) - self.start = start - self.expiry = expiry - - -class LeaseAccessConditions(_serialization.Model): - """Parameter group. - - :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and - matches this ID. - :vartype lease_id: str - """ - - _attribute_map = { - "lease_id": {"key": "leaseId", "type": "str"}, - } - - def __init__(self, *, lease_id: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active - and matches this ID. - :paramtype lease_id: str - """ - super().__init__(**kwargs) - self.lease_id = lease_id - - -class ListBlobsFlatSegmentResponse(_serialization.Model): - """An enumeration of blobs. - - All required parameters must be populated in order to send to server. - - :ivar service_endpoint: Required. - :vartype service_endpoint: str - :ivar container_name: Required. - :vartype container_name: str - :ivar prefix: - :vartype prefix: str - :ivar marker: - :vartype marker: str - :ivar max_results: - :vartype max_results: int - :ivar segment: Required. - :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment - :ivar next_marker: - :vartype next_marker: str - """ - - _validation = { - "service_endpoint": {"required": True}, - "container_name": {"required": True}, - "segment": {"required": True}, - } - - _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, - "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, - "prefix": {"key": "Prefix", "type": "str"}, - "marker": {"key": "Marker", "type": "str"}, - "max_results": {"key": "MaxResults", "type": "int"}, - "segment": {"key": "Segment", "type": "BlobFlatListSegment"}, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - _xml_map = {"name": "EnumerationResults"} - - def __init__( - self, - *, - service_endpoint: str, - container_name: str, - segment: "_models.BlobFlatListSegment", - prefix: Optional[str] = None, - marker: Optional[str] = None, - max_results: Optional[int] = None, - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword service_endpoint: Required. - :paramtype service_endpoint: str - :keyword container_name: Required. - :paramtype container_name: str - :keyword prefix: - :paramtype prefix: str - :keyword marker: - :paramtype marker: str - :keyword max_results: - :paramtype max_results: int - :keyword segment: Required. 
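An aside on the listing envelopes such as ListBlobsFlatSegmentResponse above: they follow classic marker pagination, where an empty or absent NextMarker ends the enumeration. A hedged sketch of the driving loop, with fetch_page standing in for whichever generated operation actually issues the List Blobs call:

def iter_blobs(fetch_page):
    # fetch_page(marker) -> ListBlobsFlatSegmentResponse (hypothetical callable)
    marker = None
    while True:
        page = fetch_page(marker)
        for blob in page.segment.blob_items:
            yield blob
        if not page.next_marker:  # empty or missing NextMarker ends the listing
            return
        marker = page.next_marker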
- :paramtype segment: ~azure.storage.blob.models.BlobFlatListSegment - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.service_endpoint = service_endpoint - self.container_name = container_name - self.prefix = prefix - self.marker = marker - self.max_results = max_results - self.segment = segment - self.next_marker = next_marker - - -class ListBlobsHierarchySegmentResponse(_serialization.Model): - """An enumeration of blobs. - - All required parameters must be populated in order to send to server. - - :ivar service_endpoint: Required. - :vartype service_endpoint: str - :ivar container_name: Required. - :vartype container_name: str - :ivar prefix: - :vartype prefix: str - :ivar marker: - :vartype marker: str - :ivar max_results: - :vartype max_results: int - :ivar delimiter: - :vartype delimiter: str - :ivar segment: Required. - :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment - :ivar next_marker: - :vartype next_marker: str - """ - - _validation = { - "service_endpoint": {"required": True}, - "container_name": {"required": True}, - "segment": {"required": True}, - } - - _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, - "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, - "prefix": {"key": "Prefix", "type": "str"}, - "marker": {"key": "Marker", "type": "str"}, - "max_results": {"key": "MaxResults", "type": "int"}, - "delimiter": {"key": "Delimiter", "type": "str"}, - "segment": {"key": "Segment", "type": "BlobHierarchyListSegment"}, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - _xml_map = {"name": "EnumerationResults"} - - def __init__( - self, - *, - service_endpoint: str, - container_name: str, - segment: "_models.BlobHierarchyListSegment", - prefix: Optional[str] = None, - marker: Optional[str] = None, - max_results: Optional[int] = None, - delimiter: Optional[str] = None, - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword service_endpoint: Required. - :paramtype service_endpoint: str - :keyword container_name: Required. - :paramtype container_name: str - :keyword prefix: - :paramtype prefix: str - :keyword marker: - :paramtype marker: str - :keyword max_results: - :paramtype max_results: int - :keyword delimiter: - :paramtype delimiter: str - :keyword segment: Required. - :paramtype segment: ~azure.storage.blob.models.BlobHierarchyListSegment - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.service_endpoint = service_endpoint - self.container_name = container_name - self.prefix = prefix - self.marker = marker - self.max_results = max_results - self.delimiter = delimiter - self.segment = segment - self.next_marker = next_marker - - -class ListContainersSegmentResponse(_serialization.Model): - """An enumeration of containers. - - All required parameters must be populated in order to send to server. - - :ivar service_endpoint: Required. - :vartype service_endpoint: str - :ivar prefix: - :vartype prefix: str - :ivar marker: - :vartype marker: str - :ivar max_results: - :vartype max_results: int - :ivar container_items: Required. 
- :vartype container_items: list[~azure.storage.blob.models.ContainerItem] - :ivar next_marker: - :vartype next_marker: str - """ - - _validation = { - "service_endpoint": {"required": True}, - "container_items": {"required": True}, - } - - _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, - "prefix": {"key": "Prefix", "type": "str"}, - "marker": {"key": "Marker", "type": "str"}, - "max_results": {"key": "MaxResults", "type": "int"}, - "container_items": { - "key": "ContainerItems", - "type": "[ContainerItem]", - "xml": {"name": "Containers", "wrapped": True, "itemsName": "Container"}, - }, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - _xml_map = {"name": "EnumerationResults"} - - def __init__( - self, - *, - service_endpoint: str, - container_items: List["_models.ContainerItem"], - prefix: Optional[str] = None, - marker: Optional[str] = None, - max_results: Optional[int] = None, - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword service_endpoint: Required. - :paramtype service_endpoint: str - :keyword prefix: - :paramtype prefix: str - :keyword marker: - :paramtype marker: str - :keyword max_results: - :paramtype max_results: int - :keyword container_items: Required. - :paramtype container_items: list[~azure.storage.blob.models.ContainerItem] - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.service_endpoint = service_endpoint - self.prefix = prefix - self.marker = marker - self.max_results = max_results - self.container_items = container_items - self.next_marker = next_marker - - -class Logging(_serialization.Model): - """Azure Analytics Logging settings. - - All required parameters must be populated in order to send to server. - - :ivar version: The version of Storage Analytics to configure. Required. - :vartype version: str - :ivar delete: Indicates whether all delete requests should be logged. Required. - :vartype delete: bool - :ivar read: Indicates whether all read requests should be logged. Required. - :vartype read: bool - :ivar write: Indicates whether all write requests should be logged. Required. - :vartype write: bool - :ivar retention_policy: the retention policy which determines how long the associated data - should persist. Required. - :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - - _validation = { - "version": {"required": True}, - "delete": {"required": True}, - "read": {"required": True}, - "write": {"required": True}, - "retention_policy": {"required": True}, - } - - _attribute_map = { - "version": {"key": "Version", "type": "str"}, - "delete": {"key": "Delete", "type": "bool"}, - "read": {"key": "Read", "type": "bool"}, - "write": {"key": "Write", "type": "bool"}, - "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, - } - - def __init__( - self, - *, - version: str, - delete: bool, - read: bool, - write: bool, - retention_policy: "_models.RetentionPolicy", - **kwargs: Any - ) -> None: - """ - :keyword version: The version of Storage Analytics to configure. Required. - :paramtype version: str - :keyword delete: Indicates whether all delete requests should be logged. Required. - :paramtype delete: bool - :keyword read: Indicates whether all read requests should be logged. Required. - :paramtype read: bool - :keyword write: Indicates whether all write requests should be logged. Required. 
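An aside on Logging above: every field is required by design, so each request class is opted in or out explicitly and a retention policy must accompany the settings. A sketch, noting that RetentionPolicy is a sibling generated model whose exact signature (an enabled flag plus an optional day count) is assumed here:

# Illustrative import path; RetentionPolicy's signature is an assumption.
from azure.storage.blob._generated.models import Logging, RetentionPolicy

logging_settings = Logging(
    version="1.0",
    delete=True,   # audit delete requests
    read=False,
    write=True,
    retention_policy=RetentionPolicy(enabled=True, days=7),
)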
- :paramtype write: bool - :keyword retention_policy: the retention policy which determines how long the associated data - should persist. Required. - :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - super().__init__(**kwargs) - self.version = version - self.delete = delete - self.read = read - self.write = write - self.retention_policy = retention_policy - - -class Metrics(_serialization.Model): - """a summary of request statistics grouped by API in hour or minute aggregates for blobs. - - All required parameters must be populated in order to send to server. - - :ivar version: The version of Storage Analytics to configure. - :vartype version: str - :ivar enabled: Indicates whether metrics are enabled for the Blob service. Required. - :vartype enabled: bool - :ivar include_apis: Indicates whether metrics should generate summary statistics for called API - operations. - :vartype include_apis: bool - :ivar retention_policy: the retention policy which determines how long the associated data - should persist. - :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - - _validation = { - "enabled": {"required": True}, - } - - _attribute_map = { - "version": {"key": "Version", "type": "str"}, - "enabled": {"key": "Enabled", "type": "bool"}, - "include_apis": {"key": "IncludeAPIs", "type": "bool"}, - "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, - } - - def __init__( - self, - *, - enabled: bool, - version: Optional[str] = None, - include_apis: Optional[bool] = None, - retention_policy: Optional["_models.RetentionPolicy"] = None, - **kwargs: Any - ) -> None: - """ - :keyword version: The version of Storage Analytics to configure. - :paramtype version: str - :keyword enabled: Indicates whether metrics are enabled for the Blob service. Required. - :paramtype enabled: bool - :keyword include_apis: Indicates whether metrics should generate summary statistics for called - API operations. - :paramtype include_apis: bool - :keyword retention_policy: the retention policy which determines how long the associated data - should persist. - :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - super().__init__(**kwargs) - self.version = version - self.enabled = enabled - self.include_apis = include_apis - self.retention_policy = retention_policy - - -class ModifiedAccessConditions(_serialization.Model): - """Parameter group. - - :ivar if_modified_since: Specify this header value to operate only on a blob if it has been - modified since the specified date/time. - :vartype if_modified_since: ~datetime.datetime - :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not - been modified since the specified date/time. - :vartype if_unmodified_since: ~datetime.datetime - :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. - :vartype if_match: str - :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :vartype if_none_match: str - :ivar if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching - value. 
- :vartype if_tags: str - """ - - _attribute_map = { - "if_modified_since": {"key": "ifModifiedSince", "type": "rfc-1123"}, - "if_unmodified_since": {"key": "ifUnmodifiedSince", "type": "rfc-1123"}, - "if_match": {"key": "ifMatch", "type": "str"}, - "if_none_match": {"key": "ifNoneMatch", "type": "str"}, - "if_tags": {"key": "ifTags", "type": "str"}, - } - - def __init__( - self, - *, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword if_modified_since: Specify this header value to operate only on a blob if it has been - modified since the specified date/time. - :paramtype if_modified_since: ~datetime.datetime - :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not - been modified since the specified date/time. - :paramtype if_unmodified_since: ~datetime.datetime - :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. - :paramtype if_match: str - :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching - value. - :paramtype if_none_match: str - :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. - :paramtype if_tags: str - """ - super().__init__(**kwargs) - self.if_modified_since = if_modified_since - self.if_unmodified_since = if_unmodified_since - self.if_match = if_match - self.if_none_match = if_none_match - self.if_tags = if_tags - - -class PageList(_serialization.Model): - """the list of pages. - - :ivar page_range: - :vartype page_range: list[~azure.storage.blob.models.PageRange] - :ivar clear_range: - :vartype clear_range: list[~azure.storage.blob.models.ClearRange] - :ivar next_marker: - :vartype next_marker: str - """ - - _attribute_map = { - "page_range": {"key": "PageRange", "type": "[PageRange]", "xml": {"itemsName": "PageRange"}}, - "clear_range": {"key": "ClearRange", "type": "[ClearRange]", "xml": {"itemsName": "ClearRange"}}, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - - def __init__( - self, - *, - page_range: Optional[List["_models.PageRange"]] = None, - clear_range: Optional[List["_models.ClearRange"]] = None, - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword page_range: - :paramtype page_range: list[~azure.storage.blob.models.PageRange] - :keyword clear_range: - :paramtype clear_range: list[~azure.storage.blob.models.ClearRange] - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.page_range = page_range - self.clear_range = clear_range - self.next_marker = next_marker - - -class PageRange(_serialization.Model): - """PageRange. - - All required parameters must be populated in order to send to server. - - :ivar start: Required. - :vartype start: int - :ivar end: Required. - :vartype end: int - """ - - _validation = { - "start": {"required": True}, - "end": {"required": True}, - } - - _attribute_map = { - "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, - "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, - } - _xml_map = {"name": "PageRange"} - - def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: - """ - :keyword start: Required. - :paramtype start: int - :keyword end: Required. 
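
PageList and PageRange model the XML returned by Get Page Ranges; the public wrapper is BlobClient.get_page_ranges (superseded by list_page_ranges in newer releases). A minimal sketch, with placeholder names:

import os

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "disk.vhd"
)

# Written ranges and cleared ranges come back as two lists of dicts whose
# "start"/"end" keys mirror the PageRange model above.
page_ranges, clear_ranges = blob.get_page_ranges()
for r in page_ranges:
    print("written:", r["start"], "-", r["end"])
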
- :paramtype end: int - """ - super().__init__(**kwargs) - self.start = start - self.end = end - - -class QueryFormat(_serialization.Model): - """QueryFormat. - - All required parameters must be populated in order to send to server. - - :ivar type: The quick query format type. Required. Known values are: "delimited", "json", - "arrow", and "parquet". - :vartype type: str or ~azure.storage.blob.models.QueryFormatType - :ivar delimited_text_configuration: Groups the settings used for interpreting the blob data if - the blob is delimited text formatted. - :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration - :ivar json_text_configuration: json text configuration. - :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration - :ivar arrow_configuration: Groups the settings used for formatting the response if the response - should be Arrow formatted. - :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration - :ivar parquet_text_configuration: parquet configuration. - :vartype parquet_text_configuration: JSON - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "Type", "type": "str", "xml": {"name": "Type"}}, - "delimited_text_configuration": {"key": "DelimitedTextConfiguration", "type": "DelimitedTextConfiguration"}, - "json_text_configuration": {"key": "JsonTextConfiguration", "type": "JsonTextConfiguration"}, - "arrow_configuration": {"key": "ArrowConfiguration", "type": "ArrowConfiguration"}, - "parquet_text_configuration": {"key": "ParquetTextConfiguration", "type": "object"}, - } - - def __init__( - self, - *, - type: Union[str, "_models.QueryFormatType"], - delimited_text_configuration: Optional["_models.DelimitedTextConfiguration"] = None, - json_text_configuration: Optional["_models.JsonTextConfiguration"] = None, - arrow_configuration: Optional["_models.ArrowConfiguration"] = None, - parquet_text_configuration: Optional[JSON] = None, - **kwargs: Any - ) -> None: - """ - :keyword type: The quick query format type. Required. Known values are: "delimited", "json", - "arrow", and "parquet". - :paramtype type: str or ~azure.storage.blob.models.QueryFormatType - :keyword delimited_text_configuration: Groups the settings used for interpreting the blob data - if the blob is delimited text formatted. - :paramtype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration - :keyword json_text_configuration: json text configuration. - :paramtype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration - :keyword arrow_configuration: Groups the settings used for formatting the response if the - response should be Arrow formatted. - :paramtype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration - :keyword parquet_text_configuration: parquet configuration. - :paramtype parquet_text_configuration: JSON - """ - super().__init__(**kwargs) - self.type = type - self.delimited_text_configuration = delimited_text_configuration - self.json_text_configuration = json_text_configuration - self.arrow_configuration = arrow_configuration - self.parquet_text_configuration = parquet_text_configuration - - -class QueryRequest(_serialization.Model): - """Groups the set of query request settings. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar query_type: Required. The type of the provided query expression. 
Required. Default value - is "SQL". - :vartype query_type: str - :ivar expression: The query expression in SQL. The maximum size of the query expression is - 256KiB. Required. - :vartype expression: str - :ivar input_serialization: - :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization - :ivar output_serialization: - :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization - """ - - _validation = { - "query_type": {"required": True, "constant": True}, - "expression": {"required": True}, - } - - _attribute_map = { - "query_type": {"key": "QueryType", "type": "str", "xml": {"name": "QueryType"}}, - "expression": {"key": "Expression", "type": "str", "xml": {"name": "Expression"}}, - "input_serialization": {"key": "InputSerialization", "type": "QuerySerialization"}, - "output_serialization": {"key": "OutputSerialization", "type": "QuerySerialization"}, - } - _xml_map = {"name": "QueryRequest"} - - query_type = "SQL" - - def __init__( - self, - *, - expression: str, - input_serialization: Optional["_models.QuerySerialization"] = None, - output_serialization: Optional["_models.QuerySerialization"] = None, - **kwargs: Any - ) -> None: - """ - :keyword expression: The query expression in SQL. The maximum size of the query expression is - 256KiB. Required. - :paramtype expression: str - :keyword input_serialization: - :paramtype input_serialization: ~azure.storage.blob.models.QuerySerialization - :keyword output_serialization: - :paramtype output_serialization: ~azure.storage.blob.models.QuerySerialization - """ - super().__init__(**kwargs) - self.expression = expression - self.input_serialization = input_serialization - self.output_serialization = output_serialization - - -class QuerySerialization(_serialization.Model): - """QuerySerialization. - - All required parameters must be populated in order to send to server. - - :ivar format: Required. - :vartype format: ~azure.storage.blob.models.QueryFormat - """ - - _validation = { - "format": {"required": True}, - } - - _attribute_map = { - "format": {"key": "Format", "type": "QueryFormat"}, - } - - def __init__(self, *, format: "_models.QueryFormat", **kwargs: Any) -> None: - """ - :keyword format: Required. - :paramtype format: ~azure.storage.blob.models.QueryFormat - """ - super().__init__(**kwargs) - self.format = format - - -class RetentionPolicy(_serialization.Model): - """the retention policy which determines how long the associated data should persist. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Indicates whether a retention policy is enabled for the storage service. - Required. - :vartype enabled: bool - :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be - retained. All data older than this value will be deleted. - :vartype days: int - :ivar allow_permanent_delete: Indicates whether permanent delete is allowed on this storage - account. - :vartype allow_permanent_delete: bool - """ - - _validation = { - "enabled": {"required": True}, - "days": {"minimum": 1}, - } - - _attribute_map = { - "enabled": {"key": "Enabled", "type": "bool"}, - "days": {"key": "Days", "type": "int"}, - "allow_permanent_delete": {"key": "AllowPermanentDelete", "type": "bool"}, - } - - def __init__( - self, *, enabled: bool, days: Optional[int] = None, allow_permanent_delete: Optional[bool] = None, **kwargs: Any - ) -> None: - """ - :keyword enabled: Indicates whether a retention policy is enabled for the storage service. - Required. 
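
QueryFormat, QueryRequest, and QuerySerialization above are serialized into the quick-query request body; publicly they are driven through BlobClient.query_blob. A hedged sketch, assuming a CSV blob with a header row (names and the expression are illustrative):

import os

from azure.storage.blob import BlobClient, DelimitedTextDialect

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "data.csv"
)

# The dialects become the input/output QuerySerialization elements; the
# expression maps to QueryRequest.expression (query_type is the constant "SQL").
reader = blob.query_blob(
    "SELECT _2 FROM BlobStorage",
    blob_format=DelimitedTextDialect(delimiter=",", quotechar='"', has_header=True),
    output_format=DelimitedTextDialect(delimiter=",", quotechar='"'),
)
print(reader.readall())
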
- :paramtype enabled: bool - :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should - be retained. All data older than this value will be deleted. - :paramtype days: int - :keyword allow_permanent_delete: Indicates whether permanent delete is allowed on this storage - account. - :paramtype allow_permanent_delete: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - self.days = days - self.allow_permanent_delete = allow_permanent_delete - - -class SequenceNumberAccessConditions(_serialization.Model): - """Parameter group. - - :ivar if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a - blob if it has a sequence number less than or equal to the specified. - :vartype if_sequence_number_less_than_or_equal_to: int - :ivar if_sequence_number_less_than: Specify this header value to operate only on a blob if it - has a sequence number less than the specified. - :vartype if_sequence_number_less_than: int - :ivar if_sequence_number_equal_to: Specify this header value to operate only on a blob if it - has the specified sequence number. - :vartype if_sequence_number_equal_to: int - """ - - _attribute_map = { - "if_sequence_number_less_than_or_equal_to": {"key": "ifSequenceNumberLessThanOrEqualTo", "type": "int"}, - "if_sequence_number_less_than": {"key": "ifSequenceNumberLessThan", "type": "int"}, - "if_sequence_number_equal_to": {"key": "ifSequenceNumberEqualTo", "type": "int"}, - } - - def __init__( - self, - *, - if_sequence_number_less_than_or_equal_to: Optional[int] = None, - if_sequence_number_less_than: Optional[int] = None, - if_sequence_number_equal_to: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on - a blob if it has a sequence number less than or equal to the specified. - :paramtype if_sequence_number_less_than_or_equal_to: int - :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if - it has a sequence number less than the specified. - :paramtype if_sequence_number_less_than: int - :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it - has the specified sequence number. - :paramtype if_sequence_number_equal_to: int - """ - super().__init__(**kwargs) - self.if_sequence_number_less_than_or_equal_to = if_sequence_number_less_than_or_equal_to - self.if_sequence_number_less_than = if_sequence_number_less_than - self.if_sequence_number_equal_to = if_sequence_number_equal_to - - -class SignedIdentifier(_serialization.Model): - """signed identifier. - - All required parameters must be populated in order to send to server. - - :ivar id: a unique id. Required. - :vartype id: str - :ivar access_policy: An Access policy. - :vartype access_policy: ~azure.storage.blob.models.AccessPolicy - """ - - _validation = { - "id": {"required": True}, - } - - _attribute_map = { - "id": {"key": "Id", "type": "str"}, - "access_policy": {"key": "AccessPolicy", "type": "AccessPolicy"}, - } - _xml_map = {"name": "SignedIdentifier"} - - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - access_policy: Optional["_models.AccessPolicy"] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: a unique id. Required. - :paramtype id: str - :keyword access_policy: An Access policy. 
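
SignedIdentifier pairs the required id with an optional AccessPolicy; the public route is ContainerClient.set_container_access_policy, where each dict entry becomes one SignedIdentifier element. A minimal sketch (the policy name and permissions are illustrative):

import os
from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccessPolicy, ContainerClient, ContainerSasPermissions

container = ContainerClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container"
)

now = datetime.now(timezone.utc)
policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True, list=True),
    start=now,
    expiry=now + timedelta(hours=1),
)
# Dict key -> SignedIdentifier.id, value -> SignedIdentifier.access_policy.
container.set_container_access_policy(signed_identifiers={"read-only-1h": policy})
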
- :paramtype access_policy: ~azure.storage.blob.models.AccessPolicy - """ - super().__init__(**kwargs) - self.id = id - self.access_policy = access_policy - - -class SourceModifiedAccessConditions(_serialization.Model): - """Parameter group. - - :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has - been modified since the specified date/time. - :vartype source_if_modified_since: ~datetime.datetime - :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has - not been modified since the specified date/time. - :vartype source_if_unmodified_since: ~datetime.datetime - :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :vartype source_if_match: str - :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching - value. - :vartype source_if_none_match: str - :ivar source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. - :vartype source_if_tags: str - """ - - _attribute_map = { - "source_if_modified_since": {"key": "sourceIfModifiedSince", "type": "rfc-1123"}, - "source_if_unmodified_since": {"key": "sourceIfUnmodifiedSince", "type": "rfc-1123"}, - "source_if_match": {"key": "sourceIfMatch", "type": "str"}, - "source_if_none_match": {"key": "sourceIfNoneMatch", "type": "str"}, - "source_if_tags": {"key": "sourceIfTags", "type": "str"}, - } - - def __init__( - self, - *, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - source_if_tags: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword source_if_modified_since: Specify this header value to operate only on a blob if it - has been modified since the specified date/time. - :paramtype source_if_modified_since: ~datetime.datetime - :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it - has not been modified since the specified date/time. - :paramtype source_if_unmodified_since: ~datetime.datetime - :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :paramtype source_if_match: str - :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a - matching value. - :paramtype source_if_none_match: str - :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with - a matching value. - :paramtype source_if_tags: str - """ - super().__init__(**kwargs) - self.source_if_modified_since = source_if_modified_since - self.source_if_unmodified_since = source_if_unmodified_since - self.source_if_match = source_if_match - self.source_if_none_match = source_if_none_match - self.source_if_tags = source_if_tags - - -class StaticWebsite(_serialization.Model): - """The properties that enable an account to host a static website. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Indicates whether this account is hosting a static website. Required. - :vartype enabled: bool - :ivar index_document: The default name of the index page under each directory. - :vartype index_document: str - :ivar error_document404_path: The absolute path of the custom 404 page. - :vartype error_document404_path: str - :ivar default_index_document_path: Absolute path of the default index page. 
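
SourceModifiedAccessConditions above feeds the x-ms-source-if-* headers; on the public surface these appear as source_* keyword arguments on copy operations. A hedged sketch (the source URL is a placeholder, and the exact keyword set varies by method):

import os
from datetime import datetime, timedelta, timezone

from azure.storage.blob import BlobClient

dest = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "backups", "copy-of-blob"
)

# Start the copy only if the source changed in the last day; this maps to
# x-ms-source-if-modified-since via SourceModifiedAccessConditions.
dest.start_copy_from_url(
    "https://account.blob.core.windows.net/src/blob?<sas>",
    source_if_modified_since=datetime.now(timezone.utc) - timedelta(days=1),
)
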
- :vartype default_index_document_path: str - """ - - _validation = { - "enabled": {"required": True}, - } - - _attribute_map = { - "enabled": {"key": "Enabled", "type": "bool"}, - "index_document": {"key": "IndexDocument", "type": "str"}, - "error_document404_path": {"key": "ErrorDocument404Path", "type": "str"}, - "default_index_document_path": {"key": "DefaultIndexDocumentPath", "type": "str"}, - } - - def __init__( - self, - *, - enabled: bool, - index_document: Optional[str] = None, - error_document404_path: Optional[str] = None, - default_index_document_path: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword enabled: Indicates whether this account is hosting a static website. Required. - :paramtype enabled: bool - :keyword index_document: The default name of the index page under each directory. - :paramtype index_document: str - :keyword error_document404_path: The absolute path of the custom 404 page. - :paramtype error_document404_path: str - :keyword default_index_document_path: Absolute path of the default index page. - :paramtype default_index_document_path: str - """ - super().__init__(**kwargs) - self.enabled = enabled - self.index_document = index_document - self.error_document404_path = error_document404_path - self.default_index_document_path = default_index_document_path - - -class StorageError(_serialization.Model): - """StorageError. - - :ivar message: - :vartype message: str - """ - - _attribute_map = { - "message": {"key": "Message", "type": "str"}, - } - - def __init__(self, *, message: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword message: - :paramtype message: str - """ - super().__init__(**kwargs) - self.message = message - - -class StorageServiceProperties(_serialization.Model): - """Storage Service Properties. - - :ivar logging: Azure Analytics Logging settings. - :vartype logging: ~azure.storage.blob.models.Logging - :ivar hour_metrics: a summary of request statistics grouped by API in hour or minute aggregates - for blobs. - :vartype hour_metrics: ~azure.storage.blob.models.Metrics - :ivar minute_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :vartype minute_metrics: ~azure.storage.blob.models.Metrics - :ivar cors: The set of CORS rules. - :vartype cors: list[~azure.storage.blob.models.CorsRule] - :ivar default_service_version: The default version to use for requests to the Blob service if - an incoming request's version is not specified. Possible values include version 2008-10-27 and - all more recent versions. - :vartype default_service_version: str - :ivar delete_retention_policy: the retention policy which determines how long the associated - data should persist. - :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy - :ivar static_website: The properties that enable an account to host a static website. 
- :vartype static_website: ~azure.storage.blob.models.StaticWebsite - """ - - _attribute_map = { - "logging": {"key": "Logging", "type": "Logging"}, - "hour_metrics": {"key": "HourMetrics", "type": "Metrics"}, - "minute_metrics": {"key": "MinuteMetrics", "type": "Metrics"}, - "cors": {"key": "Cors", "type": "[CorsRule]", "xml": {"wrapped": True}}, - "default_service_version": {"key": "DefaultServiceVersion", "type": "str"}, - "delete_retention_policy": {"key": "DeleteRetentionPolicy", "type": "RetentionPolicy"}, - "static_website": {"key": "StaticWebsite", "type": "StaticWebsite"}, - } - - def __init__( - self, - *, - logging: Optional["_models.Logging"] = None, - hour_metrics: Optional["_models.Metrics"] = None, - minute_metrics: Optional["_models.Metrics"] = None, - cors: Optional[List["_models.CorsRule"]] = None, - default_service_version: Optional[str] = None, - delete_retention_policy: Optional["_models.RetentionPolicy"] = None, - static_website: Optional["_models.StaticWebsite"] = None, - **kwargs: Any - ) -> None: - """ - :keyword logging: Azure Analytics Logging settings. - :paramtype logging: ~azure.storage.blob.models.Logging - :keyword hour_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :paramtype hour_metrics: ~azure.storage.blob.models.Metrics - :keyword minute_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :paramtype minute_metrics: ~azure.storage.blob.models.Metrics - :keyword cors: The set of CORS rules. - :paramtype cors: list[~azure.storage.blob.models.CorsRule] - :keyword default_service_version: The default version to use for requests to the Blob service - if an incoming request's version is not specified. Possible values include version 2008-10-27 - and all more recent versions. - :paramtype default_service_version: str - :keyword delete_retention_policy: the retention policy which determines how long the associated - data should persist. - :paramtype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy - :keyword static_website: The properties that enable an account to host a static website. - :paramtype static_website: ~azure.storage.blob.models.StaticWebsite - """ - super().__init__(**kwargs) - self.logging = logging - self.hour_metrics = hour_metrics - self.minute_metrics = minute_metrics - self.cors = cors - self.default_service_version = default_service_version - self.delete_retention_policy = delete_retention_policy - self.static_website = static_website - - -class StorageServiceStats(_serialization.Model): - """Stats for the storage service. - - :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. - :vartype geo_replication: ~azure.storage.blob.models.GeoReplication - """ - - _attribute_map = { - "geo_replication": {"key": "GeoReplication", "type": "GeoReplication"}, - } - - def __init__(self, *, geo_replication: Optional["_models.GeoReplication"] = None, **kwargs: Any) -> None: - """ - :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. - :paramtype geo_replication: ~azure.storage.blob.models.GeoReplication - """ - super().__init__(**kwargs) - self.geo_replication = geo_replication - - -class UserDelegationKey(_serialization.Model): - """A user delegation key. - - All required parameters must be populated in order to send to server. - - :ivar signed_oid: The Azure Active Directory object ID in GUID format. Required. 
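
StorageServiceProperties aggregates the Logging, Metrics, RetentionPolicy, and StaticWebsite models above; the public wrapper is BlobServiceClient.set_service_properties, where Logging surfaces as BlobAnalyticsLogging. A sketch with illustrative settings:

import os

from azure.storage.blob import (
    BlobAnalyticsLogging,
    BlobServiceClient,
    Metrics,
    RetentionPolicy,
    StaticWebsite,
)

service = BlobServiceClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"]
)

# days must be >= 1, matching the {"minimum": 1} validation above.
retention = RetentionPolicy(enabled=True, days=7)
service.set_service_properties(
    analytics_logging=BlobAnalyticsLogging(
        version="1.0", delete=True, read=True, write=True, retention_policy=retention
    ),
    hour_metrics=Metrics(enabled=True, include_apis=True, retention_policy=retention),
    minute_metrics=Metrics(enabled=False),
    delete_retention_policy=retention,
    static_website=StaticWebsite(enabled=True, index_document="index.html"),
)
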
- :vartype signed_oid: str - :ivar signed_tid: The Azure Active Directory tenant ID in GUID format. Required. - :vartype signed_tid: str - :ivar signed_start: The date-time the key is active. Required. - :vartype signed_start: ~datetime.datetime - :ivar signed_expiry: The date-time the key expires. Required. - :vartype signed_expiry: ~datetime.datetime - :ivar signed_service: Abbreviation of the Azure Storage service that accepts the key. Required. - :vartype signed_service: str - :ivar signed_version: The service version that created the key. Required. - :vartype signed_version: str - :ivar value: The key as a base64 string. Required. - :vartype value: str - """ - - _validation = { - "signed_oid": {"required": True}, - "signed_tid": {"required": True}, - "signed_start": {"required": True}, - "signed_expiry": {"required": True}, - "signed_service": {"required": True}, - "signed_version": {"required": True}, - "value": {"required": True}, - } - - _attribute_map = { - "signed_oid": {"key": "SignedOid", "type": "str"}, - "signed_tid": {"key": "SignedTid", "type": "str"}, - "signed_start": {"key": "SignedStart", "type": "iso-8601"}, - "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, - "signed_service": {"key": "SignedService", "type": "str"}, - "signed_version": {"key": "SignedVersion", "type": "str"}, - "value": {"key": "Value", "type": "str"}, - } - - def __init__( - self, - *, - signed_oid: str, - signed_tid: str, - signed_start: datetime.datetime, - signed_expiry: datetime.datetime, - signed_service: str, - signed_version: str, - value: str, - **kwargs: Any - ) -> None: - """ - :keyword signed_oid: The Azure Active Directory object ID in GUID format. Required. - :paramtype signed_oid: str - :keyword signed_tid: The Azure Active Directory tenant ID in GUID format. Required. - :paramtype signed_tid: str - :keyword signed_start: The date-time the key is active. Required. - :paramtype signed_start: ~datetime.datetime - :keyword signed_expiry: The date-time the key expires. Required. - :paramtype signed_expiry: ~datetime.datetime - :keyword signed_service: Abbreviation of the Azure Storage service that accepts the key. - Required. - :paramtype signed_service: str - :keyword signed_version: The service version that created the key. Required. - :paramtype signed_version: str - :keyword value: The key as a base64 string. Required. - :paramtype value: str - """ - super().__init__(**kwargs) - self.signed_oid = signed_oid - self.signed_tid = signed_tid - self.signed_start = signed_start - self.signed_expiry = signed_expiry - self.signed_service = signed_service - self.signed_version = signed_version - self.value = value diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py deleted file mode 100644 index 91b8dd56c7f7..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py +++ /dev/null @@ -1,1090 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
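
The UserDelegationKey model above carries the signed_* fields used to mint user-delegation SAS tokens. A minimal public-API sketch, assuming an AAD credential via azure-identity (account and blob names are placeholders):

from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobSasPermissions, BlobServiceClient, generate_blob_sas

service = BlobServiceClient(
    "https://account.blob.core.windows.net", credential=DefaultAzureCredential()
)

now = datetime.now(timezone.utc)
# key_start_time / key_expiry_time populate SignedStart / SignedExpiry above.
key = service.get_user_delegation_key(
    key_start_time=now, key_expiry_time=now + timedelta(hours=1)
)
sas = generate_blob_sas(
    account_name="account",
    container_name="my-container",
    blob_name="my-blob",
    user_delegation_key=key,
    permission=BlobSasPermissions(read=True),
    expiry=now + timedelta(hours=1),
)
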
-# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - url: str, - *, - content_length: int, - timeout: Optional[int] = None, - blob_content_type: Optional[str] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_cache_control: Optional[str] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - 
_headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_append_block_request( - url: str, - *, - content_length: int, - content: IO[bytes], - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - lease_id: Optional[str] = None, - max_size: Optional[int] = None, - append_position: Optional[int] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - 
if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if max_size is not None: - _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") - if append_position is not None: - _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_append_block_from_url_request( - url: str, - *, - source_url: str, - content_length: int, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - lease_id: Optional[str] = None, - max_size: Optional[int] = None, - append_position: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") - if source_range is not None: - _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") - if source_content_md5 is not None: - _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") - if source_contentcrc64 is not None: - _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( - "source_contentcrc64", source_contentcrc64, "bytearray" - ) - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", 
encryption_scope, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if max_size is not None: - _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") - if append_position is not None: - _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if copy_source_authorization is not None: - _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( - "copy_source_authorization", copy_source_authorization, "str" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_seal_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - append_position: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is 
not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if append_position is not None: - _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -class AppendBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`append_blob` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def create( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create Append Blob operation creates a new append blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. 
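
AppendBlobOperations.create (its parameter list continues below) is what the public BlobClient.create_append_blob ultimately drives; metadata and tags flow into the x-ms-meta-* and x-ms-tags headers assembled by build_create_request above. A minimal sketch with illustrative values:

import os

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "app.log"
)

# Creates a zero-length append blob; blocks are appended separately.
blob.create_append_blob(metadata={"source": "ingest"}, tags={"env": "dev"})
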
- :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = 
cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_create_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - blob_type=blob_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def append_block( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - 
transactional_content_crc64: Optional[bytes] = None,
- request_id_parameter: Optional[str] = None,
- lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
- append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
- cpk_info: Optional[_models.CpkInfo] = None,
- cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
- modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
- **kwargs: Any
- ) -> None:
- """The Append Block operation commits a new block of data to the end of an existing append blob.
- The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to
- AppendBlob. Append Block is supported only on version 2015-02-21 or later.
-
- :param content_length: The length of the request. Required.
- :type content_length: int
- :param body: Initial data. Required.
- :type body: IO[bytes]
- :param timeout: The timeout parameter is expressed in seconds. For more information, see
- :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting
- Timeouts for Blob Service Operations.</a>`. Default value is None.
- :type timeout: int
- :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
- by the service. Default value is None.
- :type transactional_content_md5: bytes
- :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
- validated by the service. Default value is None.
- :type transactional_content_crc64: bytes
- :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
- limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
- value is None.
- :type request_id_parameter: str
- :param lease_access_conditions: Parameter group. Default value is None.
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
- :param append_position_access_conditions: Parameter group. Default value is None.
- :type append_position_access_conditions:
- ~azure.storage.blob.models.AppendPositionAccessConditions
- :param cpk_info: Parameter group. Default value is None.
- :type cpk_info: ~azure.storage.blob.models.CpkInfo
- :param cpk_scope_info: Parameter group. Default value is None.
- :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
- :param modified_access_conditions: Parameter group. Default value is None.
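At the public-API layer these parameter groups are flattened onto individual x-ms-* request headers, and callers normally reach this generated append_block operation through the hand-written BlobClient rather than invoking it directly. A minimal sketch of the equivalent public call, assuming a valid connection string (the connection string, container, and blob names are placeholders):

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="logs", blob_name="app.log"
)
blob.create_append_blob()                  # the target must exist as an append blob
blob.append_block(b"one more log line\n")  # commits a new block at the end of the blob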
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _max_size = None - _append_position = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position - _max_size = append_position_access_conditions.max_size - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_append_block_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - lease_id=_lease_id, - max_size=_max_size, - append_position=_append_position, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-append-offset"] = self._deserialize( - "str", response.headers.get("x-ms-blob-append-offset") - ) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def append_block_from_url( # pylint: disable=inconsistent-return-statements - self, - source_url: str, - content_length: int, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Append Block operation commits a new block of data to the end of an existing append blob - where the contents are read from a source url. The Append Block operation is permitted only if - the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on - version 2015-02-21 version or later. - - :param source_url: Specify a URL to the copy source. Required. - :type source_url: str - :param content_length: The length of the request. Required. - :type content_length: int - :param source_range: Bytes of source data in the specified range. Default value is None. - :type source_range: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. Default value is None. - :type source_contentcrc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. 
Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. Default value is None. - :type append_position_access_conditions: - ~azure.storage.blob.models.AppendPositionAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _max_size = None - _append_position = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position - _max_size = append_position_access_conditions.max_size - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if 
source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_append_block_from_url_request( - url=self._config.url, - source_url=source_url, - content_length=content_length, - source_range=source_range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - max_size=_max_size, - append_position=_append_position, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-append-offset"] = self._deserialize( - "str", response.headers.get("x-ms-blob-append-offset") - ) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def 
seal(  # pylint: disable=inconsistent-return-statements
- self,
- timeout: Optional[int] = None,
- request_id_parameter: Optional[str] = None,
- lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
- modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
- append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
- **kwargs: Any
- ) -> None:
- """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on
- version 2019-12-12 or later.
-
- :param timeout: The timeout parameter is expressed in seconds. For more information, see
- :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting
- Timeouts for Blob Service Operations.</a>`. Default value is None.
- :type timeout: int
- :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
- limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
- value is None.
- :type request_id_parameter: str
- :param lease_access_conditions: Parameter group. Default value is None.
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
- :param modified_access_conditions: Parameter group. Default value is None.
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
- :param append_position_access_conditions: Parameter group. Default value is None.
- :type append_position_access_conditions:
- ~azure.storage.blob.models.AppendPositionAccessConditions
- :return: None or the result of cls(response)
- :rtype: None
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
- 401: ClientAuthenticationError,
- 404: ResourceNotFoundError,
- 409: ResourceExistsError,
- 304: ResourceNotModifiedError,
- }
- error_map.update(kwargs.pop("error_map", {}) or {})
-
- _headers = kwargs.pop("headers", {}) or {}
- _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
-
- comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal"))
- cls: ClsType[None] = kwargs.pop("cls", None)
-
- _lease_id = None
- _if_modified_since = None
- _if_unmodified_since = None
- _if_match = None
- _if_none_match = None
- _append_position = None
- if lease_access_conditions is not None:
- _lease_id = lease_access_conditions.lease_id
- if modified_access_conditions is not None:
- _if_match = modified_access_conditions.if_match
- _if_modified_since = modified_access_conditions.if_modified_since
- _if_none_match = modified_access_conditions.if_none_match
- _if_unmodified_since = modified_access_conditions.if_unmodified_since
- if append_position_access_conditions is not None:
- _append_position = append_position_access_conditions.append_position
-
- _request = build_seal_request(
- url=self._config.url,
- timeout=timeout,
- request_id_parameter=request_id_parameter,
- lease_id=_lease_id,
- if_modified_since=_if_modified_since,
- if_unmodified_since=_if_unmodified_since,
- if_match=_if_match,
- if_none_match=_if_none_match,
- append_position=_append_position,
- comp=comp,
- version=self._config.version,
- headers=_headers,
- params=_params,
- )
- _request.url = self._client.format_url(_request.url)
-
- _stream = False
- pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
- _request, stream=_stream, **kwargs
- )
-
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response,
error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py deleted file mode 100644 index 83033f6b9f4c..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ /dev/null @@ -1,4525 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, Iterator, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. 
import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_download_request( - url: str, - *, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - range_get_content_md5: Optional[bool] = None, - range_get_content_crc64: Optional[bool] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if range is not None: - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if range_get_content_md5 is not None: - _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header( - "range_get_content_md5", range_get_content_md5, "bool" - ) - if range_get_content_crc64 is not None: - _headers["x-ms-range-get-content-crc64"] = _SERIALIZER.header( - "range_get_content_crc64", range_get_content_crc64, "bool" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = 
_SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_properties_request( - url: str, - *, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return 
HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - url: str, - *, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - blob_delete_type: Literal["Permanent"] = "Permanent", - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if blob_delete_type is not None: - _params["deletetype"] = _SERIALIZER.query("blob_delete_type", blob_delete_type, "str") - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if delete_snapshots is not None: - _headers["x-ms-delete-snapshots"] = _SERIALIZER.header("delete_snapshots", delete_snapshots, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_undelete_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - 
path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_expiry_request( - url: str, - *, - expiry_options: Union[str, _models.BlobExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") - if expires_on is not None: - _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_http_headers_request( - url: str, - *, - timeout: Optional[int] = None, - blob_cache_control: Optional[str] = None, - blob_content_type: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", 
"{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - _headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_immutability_policy_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - 
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_immutability_policy_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_legal_hold_request( - url: str, - *, - legal_hold: bool, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] 
= _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_metadata_request( - url: str, - *, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, 
params=_params, headers=_headers, **kwargs) - - -def build_acquire_lease_request( - url: str, - *, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - if duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_release_lease_request( - url: str, - *, - lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", 
"2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_renew_lease_request( - url: str, - *, - lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - 
_headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_change_lease_request( - url: str, - *, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_break_lease_request( - url: str, - *, - timeout: Optional[int] = None, - break_period: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, 
- **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - if break_period is not None: - _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_snapshot_request( - url: str, - *, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] 
= _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_start_copy_from_url_request( - url: str, - *, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - source_if_tags: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - seal_blob: Optional[bool] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - 
- # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if rehydrate_priority is not None: - _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - if source_if_tags is not None: - _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if seal_blob is not None: - _headers["x-ms-seal-blob"] = _SERIALIZER.header("seal_blob", seal_blob, "bool") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_copy_from_url_request( - url: str, - *, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: 
Optional[str] = None, - source_if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - encryption_scope: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-requires-sync"] = _SERIALIZER.header("x_ms_requires_sync", x_ms_requires_sync, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - 
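# Unlike Start Copy From URL above (asynchronous; poll x-ms-copy-status, abort
# via build_abort_copy_from_url_request), build_copy_from_url_request pins
# x-ms-requires-sync: true, so the service completes the copy before replying.
# On the public client this is the requires_sync flag; a sketch with
# placeholder URLs:
from azure.storage.blob import BlobClient

dst = BlobClient.from_blob_url("https://<account>.blob.core.windows.net/c/dst?<sas>")
result = dst.start_copy_from_url(
    "https://<account>.blob.core.windows.net/c/src?<sas>",
    requires_sync=True,  # emit the synchronous Copy From URL operation
)
assert result["copy_status"] == "success"  # synchronous copies finish before returning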
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if source_content_md5 is not None: - _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - if copy_source_authorization is not None: - _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( - "copy_source_authorization", copy_source_authorization, "str" - ) - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if copy_source_tags is not None: - _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_abort_copy_from_url_request( - url: str, - *, - copy_id: str, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) - copy_action_abort_constant: Literal["abort"] = kwargs.pop( - "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") - ) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["copyid"] = _SERIALIZER.query("copy_id", copy_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-copy-action"] = _SERIALIZER.header("copy_action_abort_constant", copy_action_abort_constant, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_tier_request( - url: str, - *, - tier: Union[str, _models.AccessTierRequired], - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - rehydrate_priority: 
Optional[Union[str, _models.RehydratePriority]] = None, - request_id_parameter: Optional[str] = None, - lease_id: Optional[str] = None, - if_tags: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if rehydrate_priority is not None: - _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_query_request( - url: str, - *, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_get_tags_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - if_tags: Optional[str] = None, - 
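# build_query_request above is the one POST builder in this group: the query
# definition travels as an XML body (`content`) to <blob-url>?comp=query and
# the response streams back the filtered data. The public surface is
# BlobClient.query_blob; a minimal sketch (placeholder URL, CSV input assumed):
from azure.storage.blob import BlobClient

blob = BlobClient.from_blob_url("https://<account>.blob.core.windows.net/data/rows.csv?<sas>")
reader = blob.query_blob("SELECT * from BlobStorage")
print(reader.readall())  # bytes filtered server-side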
lease_id: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_tags_request( - url: str, - *, - timeout: Optional[int] = None, - version_id: Optional[str] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - request_id_parameter: Optional[str] = None, - if_tags: Optional[str] = None, - lease_id: Optional[str] = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if version_id is not None: - _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if request_id_parameter is not None: - 
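# Get Blob Tags and Set Blob Tags are a symmetric GET/PUT pair on ?comp=tags;
# the x-ms-if-tags header seen throughout this file can then gate other
# operations on those tags. An illustrative round trip via the public client
# (placeholder URL):
from azure.storage.blob import BlobClient

blob = BlobClient.from_blob_url("https://<account>.blob.core.windows.net/c/b?<sas>")
blob.set_blob_tags({"project": "alpha", "stage": "raw"})  # PUT ?comp=tags
assert blob.get_blob_tags()["project"] == "alpha"         # GET ?comp=tags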
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -class BlobOperations: # pylint: disable=too-many-public-methods - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`blob` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def download( - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - range_get_content_md5: Optional[bool] = None, - range_get_content_crc64: Optional[bool] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> Iterator[bytes]: - """The Download operation reads or downloads a blob from the system, including its metadata and - properties. You can also call Download to read a snapshot. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param range_get_content_md5: When set to true and specified together with the Range, the - service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. Default value is None. - :type range_get_content_md5: bool - :param range_get_content_crc64: When set to true and specified together with the Range, the - service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. Default value is None. 
- :type range_get_content_crc64: bool - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: Iterator[bytes] or the result of cls(response) - :rtype: Iterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_download_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - range=range, - lease_id=_lease_id, - range_get_content_md5=range_get_content_md5, - range_get_content_crc64=range_get_content_crc64, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = 
self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", 
response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_properties( # pylint: disable=inconsistent-return-statements - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - 
**kwargs: Any - ) -> None: - """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and - system properties for the blob. It does not return the content of the blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_properties_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) - response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-incremental-copy"] = self._deserialize( - "bool", response.headers.get("x-ms-incremental-copy") - ) - response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( - "str", response.headers.get("x-ms-copy-destination-snapshot") - ) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) - response_headers["x-ms-access-tier-inferred"] = self._deserialize( - "bool", response.headers.get("x-ms-access-tier-inferred") - ) - response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) - response_headers["x-ms-access-tier-change-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-access-tier-change-time") - ) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["x-ms-is-current-version"] = self._deserialize( - "bool", response.headers.get("x-ms-is-current-version") - ) - response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) - response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) - response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) - response_headers["x-ms-rehydrate-priority"] = self._deserialize( - "str", response.headers.get("x-ms-rehydrate-priority") - ) - response_headers["x-ms-last-access-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-last-access-time") - ) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, - request_id_parameter: Optional[str] = None, - blob_delete_type: Literal["Permanent"] = "Permanent", - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - permanently removed from the storage account. 
If the storage account's soft delete feature is - enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] - (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's - data is permanently removed from the storage account. Note that you continue to be charged for - the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and - specify the "include=deleted" query parameter to discover which blobs and snapshots have been - soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other - operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code - of 404 (ResourceNotFound). - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the - following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. Known values are: "include" and "only". - Default value is None. - :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and - None. Default value is "Permanent". - :type blob_delete_type: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - lease_id=_lease_id, - delete_snapshots=delete_snapshots, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_delete_type=blob_delete_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def undelete( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Undelete a blob that was previously soft deleted. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
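# Publicly, this generated delete operation is surfaced as BlobClient.delete_blob.
# A minimal sketch, assuming soft delete is enabled on the account and that the
# "<connection-string>" placeholder is supplied by the caller:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
# delete_snapshots="include" deletes the base blob together with its snapshots,
# matching the delete_snapshots options documented above.
blob.delete_blob(delete_snapshots="include")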
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_undelete_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_expiry( # pylint: disable=inconsistent-return-statements - self, - expiry_options: Union[str, _models.BlobExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets the time a blob will expire and be deleted. - - :param expiry_options: Required. Indicates mode of the expiry time. Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. Default value is None. 
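# A soft-deleted blob can be restored with the public undelete wrapper. A sketch
# that follows the recipe from the delete docstring above (list with
# include=deleted, then undelete); the names are placeholders:
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient.from_connection_string("<connection-string>")
container = service.get_container_client("mycontainer")
for props in container.list_blobs(include=["deleted"]):
    if props.deleted:
        container.get_blob_client(props.name).undelete_blob()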
- :type expires_on: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_expiry_request( - url=self._config.url, - expiry_options=expiry_options, - timeout=timeout, - request_id_parameter=request_id_parameter, - expires_on=expires_on, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_http_headers( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set HTTP Headers operation sets system properties on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
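# The blob package exposes no dedicated convenience method for Set Blob Expiry,
# so this sketch drives the generated operation directly. Reaching through
# BlobClient._client is an internal detail and purely illustrative, and the
# milliseconds interpretation of expires_on for "RelativeToNow" is an assumption
# drawn from the REST documentation rather than from this docstring:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
blob._client.blob.set_expiry("RelativeToNow", expires_on="30000")  # ~30 seconds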
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_http_headers_request( - url=self._config.url, - timeout=timeout, - blob_cache_control=_blob_cache_control, - blob_content_type=_blob_content_type, - blob_content_md5=_blob_content_md5, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_content_disposition=_blob_content_disposition, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_immutability_policy( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Immutability Policy operation sets the immutability policy on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param modified_access_conditions: Parameter group. Default value is None. 
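# The public wrapper, BlobClient.set_http_headers, bundles the individual
# blob_http_headers fields above into a ContentSettings object. A minimal sketch:
from azure.storage.blob import BlobClient, ContentSettings

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="report.csv"
)
blob.set_http_headers(content_settings=ContentSettings(
    content_type="text/csv",
    cache_control="max-age=3600",
    content_disposition='attachment; filename="report.csv"',
))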
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_immutability_policy_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - if_unmodified_since=_if_unmodified_since, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") - ) - response_headers["x-ms-immutability-policy-mode"] = self._deserialize( - "str", response.headers.get("x-ms-immutability-policy-mode") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def delete_immutability_policy( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """The Delete Immutability Policy operation deletes the immutability policy on the blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
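# The matching public call takes an ImmutabilityPolicy model carrying the
# expiry/mode pair that this operation sends as headers. A sketch, assuming the
# container was created with version-level immutability support:
import datetime
from azure.storage.blob import BlobClient, ImmutabilityPolicy

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
policy = ImmutabilityPolicy(
    expiry_time=datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=7),
    policy_mode="Unlocked",  # "Unlocked" policies can still be changed; "Locked" cannot
)
blob.set_immutability_policy(policy)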
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_immutability_policy_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_legal_hold( # pylint: disable=inconsistent-return-statements - self, legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """The Set Legal Hold operation sets a legal hold on the blob. - - :param legal_hold: Specified if a legal hold should be set on the blob. Required. - :type legal_hold: bool - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
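# Deleting the policy again is a single public call; note the service only
# permits this while the policy mode is "Unlocked". Minimal sketch:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
blob.delete_immutability_policy()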
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_legal_hold_request( - url=self._config.url, - legal_hold=legal_hold, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_metadata( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or - more name-value pairs. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. 
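# The legal-hold flag maps onto BlobClient.set_legal_hold. A sketch of placing
# and then clearing a hold:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
blob.set_legal_hold(True)   # place the hold
blob.set_legal_hold(False)  # clear it again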
- :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_metadata_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers 
= {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def acquire_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a - lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. Default value is None. - :type duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
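# Publicly this is BlobClient.set_blob_metadata. The supplied pairs replace the
# blob's existing metadata rather than merging with it, and names must follow
# the C# identifier rules noted above. Minimal sketch:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
blob.set_blob_metadata(metadata={"department": "finance", "review": "pending"})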
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_acquire_lease_request( - url=self._config.url, - timeout=timeout, - duration=duration, - proposed_lease_id=proposed_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def release_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. 
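# Lease management is exposed through BlobLeaseClient; BlobClient.acquire_lease
# returns one. A sketch taking a 15-second lease (the shortest finite duration
# the docstring allows) and writing under it:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
lease = blob.acquire_lease(lease_duration=15)
try:
    blob.set_blob_metadata({"locked": "yes"}, lease=lease)
finally:
    lease.release()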
- - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_release_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) 
# type: ignore - - @distributed_trace - def renew_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_renew_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = 
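# Release and renew map onto BlobLeaseClient methods. A sketch that re-attaches
# to a lease by its ID ("<existing-lease-guid>" is a placeholder), renews it,
# then releases it:
from azure.storage.blob import BlobClient, BlobLeaseClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
lease = BlobLeaseClient(blob, lease_id="<existing-lease-guid>")
lease.renew()    # restarts the clock on the originally requested duration
lease.release()  # makes the blob immediately available to other writers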
self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def change_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Required. - :type proposed_lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_change_lease_request( - url=self._config.url, - lease_id=lease_id, - proposed_lease_id=proposed_lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def break_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - break_period: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - operations. 
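# Changing a lease swaps its ID for a proposed GUID while the lock stays held;
# publicly this is BlobLeaseClient.change. Sketch with a placeholder lease ID:
import uuid
from azure.storage.blob import BlobClient, BlobLeaseClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="mycontainer", blob_name="myblob"
)
lease = BlobLeaseClient(blob, lease_id="<existing-lease-guid>")
lease.change(proposed_lease_id=str(uuid.uuid4()))  # must be a valid GUID string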
- - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param break_period: For a break operation, proposed duration the lease should continue before - it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining on the lease is used. A new - lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a - fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease - breaks immediately. Default value is None. - :type break_period: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_break_lease_request( - url=self._config.url, - timeout=timeout, - break_period=break_period, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def create_snapshot( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create Snapshot operation creates a read-only snapshot of a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_create_snapshot_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def start_copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - seal_blob: Optional[bool] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Start Copy From URL operation copies a blob or an internet resource to a new blob. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. Known values are: "High" and "Standard". Default value is None. - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. 
- :type blob_tags_string: str - :param seal_blob: Overrides the sealed state of the destination blob. Service version - 2019-12-12 and newer. Default value is None. - :type seal_blob: bool - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_tags = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_tags = source_modified_access_conditions.source_if_tags - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_start_copy_from_url_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - metadata=metadata, - tier=tier, - rehydrate_priority=rehydrate_priority, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_tags=_source_if_tags, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - 
lease_id=_lease_id, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - seal_blob=seal_blob, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not - return a response until the copy is complete. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. 
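Annotation: a sketch of the asynchronous copy that the deleted `start_copy_from_url` implements, again via the public client. The source URL (including its SAS token) and all names are placeholders:

```python
import os

from azure.storage.blob import BlobClient

src_url = "https://account.blob.core.windows.net/src/data.bin?<sas>"  # placeholder
dst = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "dst-container", "data.bin"
)

# The service accepts the copy with 202 and reports x-ms-copy-id /
# x-ms-copy-status, surfaced here as dict entries.
copy = dst.start_copy_from_url(src_url)
print(copy["copy_id"], copy["copy_status"])  # typically '...', 'pending'
```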
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be - copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and - "COPY". Default value is None. - :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_scope_info: Parameter group. Default value is None. 
- :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - x_ms_requires_sync: Literal["true"] = kwargs.pop( - "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true") - ) - cls: ClsType[None] = kwargs.pop("cls", None) - - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _lease_id = None - _encryption_scope = None - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - - _request = build_copy_from_url_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - metadata=metadata, - tier=tier, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - source_content_md5=source_content_md5, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - copy_source_authorization=copy_source_authorization, - encryption_scope=_encryption_scope, - copy_source_tags=copy_source_tags, - x_ms_requires_sync=x_ms_requires_sync, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def abort_copy_from_url( # pylint: disable=inconsistent-return-statements - self, - copy_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a - destination blob with zero length and full metadata. - - :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. Required. - :type copy_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) - copy_action_abort_constant: Literal["abort"] = kwargs.pop( - "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") - ) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_abort_copy_from_url_request( - url=self._config.url, - copy_id=copy_id, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - comp=comp, - copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_tier( # pylint: disable=inconsistent-return-statements - self, - tier: Union[str, _models.AccessTierRequired], - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - timeout: Optional[int] = None, - rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a - premium storage account and on a block blob in a blob storage account (locally redundant - storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of - the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not - update the blob's ETag. - - :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", - "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". - Required. 
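Annotation: the abort path pairs with the asynchronous copy above. A sketch using the public `abort_copy`, which needs the copy id of the pending operation; per the docstring, the destination is left zero-length with full metadata:

```python
import os

from azure.storage.blob import BlobClient

dst = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "dst-container", "data.bin"
)

# Only a copy that is still pending can be aborted.
props = dst.get_blob_properties()
if props.copy.status == "pending":
    dst.abort_copy(props.copy.id)
```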
- :type tier: str or ~azure.storage.blob.models.AccessTierRequired - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. Known values are: "High" and "Standard". Default value is None. - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - - _request = build_set_tier_request( - url=self._config.url, - tier=tier, - snapshot=snapshot, - version_id=version_id, - timeout=timeout, - rehydrate_priority=rehydrate_priority, - request_id_parameter=request_id_parameter, - lease_id=_lease_id, - if_tags=_if_tags, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", 
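Annotation: the public `set_standard_blob_tier` drives this generated `set_tier` operation (`set_premium_page_blob_tier` covers the P-tiers). A sketch with placeholder names; note the docstring's point that changing the tier does not update the blob's ETag:

```python
import os

from azure.storage.blob import BlobClient, StandardBlobTier

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "my-blob"
)

blob.set_standard_blob_tier(StandardBlobTier.COOL)

# Rehydrating out of Archive can carry the rehydrate_priority shown above.
blob.set_standard_blob_tier("Hot", rehydrate_priority="High")
```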
response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def query( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - 
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - query_request: Optional[_models.QueryRequest] = None, - **kwargs: Any - ) -> Iterator[bytes]: - """The Query operation enables users to select/project on blob data by providing simple query - expressions. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param query_request: the query request. Default value is None. - :type query_request: ~azure.storage.blob.models.QueryRequest - :return: Iterator[bytes] or the result of cls(response) - :rtype: Iterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if query_request is not None: - _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) - else: - _content = None - - _request = build_query_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, 
- if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - 
response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - 
response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-committed-block-count"] = self._deserialize( - "int", response.headers.get("x-ms-blob-committed-block-count") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-blob-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-blob-content-md5") - ) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_tags( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> _models.BlobTags: - """The Get Tags operation enables users to get the tags associated with a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. - :type version_id: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. 
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: BlobTags or the result of cls(response) - :rtype: ~azure.storage.blob.models.BlobTags - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) - - _if_tags = None - _lease_id = None - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_tags_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - snapshot=snapshot, - version_id=version_id, - if_tags=_if_tags, - lease_id=_lease_id, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("BlobTags", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def set_tags( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - version_id: Optional[str] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - tags: Optional[_models.BlobTags] = None, - **kwargs: Any - ) -> None: - """The Set Tags operation enables users to set tags on a blob. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. - Default value is None. 
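Annotation: `get_tags` deserializes the XML `BlobTags` body; the public `get_blob_tags` hands it back as a plain dict. A sketch with placeholder names:

```python
import os

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "my-blob"
)

tags = blob.get_blob_tags()
print(tags)  # e.g. {'project': 'demo'}
```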
- :type version_id: str - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param tags: Blob tags. Default value is None. - :type tags: ~azure.storage.blob.models.BlobTags - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_tags = None - _lease_id = None - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if tags is not None: - _content = self._serialize.body(tags, "BlobTags", is_xml=True) - else: - _content = None - - _request = build_set_tags_request( - url=self._config.url, - timeout=timeout, - version_id=version_id, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - request_id_parameter=request_id_parameter, - if_tags=_if_tags, - lease_id=_lease_id, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - 
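Annotation: the inverse operation; the public `set_blob_tags` serializes a dict into the `BlobTags` XML body that this generated `set_tags` sends with `comp=tags`. A sketch with placeholder names:

```python
import os

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"], "my-container", "my-blob"
)

blob.set_blob_tags({"project": "demo", "status": "active"})
```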
return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py deleted file mode 100644 index ab161913ceb0..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py +++ /dev/null @@ -1,1738 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_upload_request( - url: str, - *, - content_length: int, - content: IO[bytes], - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - blob_content_type: Optional[str] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_cache_control: Optional[str] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - transactional_content_crc64: Optional[bytes] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - blob_type: Literal["BlockBlob"] = 
kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - _headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if 
request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_put_blob_from_url_request( - url: str, - *, - content_length: int, - copy_source: str, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - blob_content_type: Optional[str] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_cache_control: Optional[str] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - source_if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - copy_source_blob_properties: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - 
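
The builder above hand-assembles the raw Put Blob request. As a hedged sketch of how it was invoked before this deletion (the account URL and payload are placeholders, and _generated is private API that this diff removes):

import io
from azure.storage.blob._generated.operations._block_blob_operations import build_upload_request

payload = b"hello, block blob"
request = build_upload_request(
    url="https://myaccount.blob.core.windows.net/mycontainer/myblob",  # placeholder
    content_length=len(payload),
    content=io.BytesIO(payload),
    timeout=30,
)
# Returns an azure.core.rest.HttpRequest: a PUT carrying
# x-ms-blob-type: BlockBlob and x-ms-version: 2024-08-04.
print(request.method, request.url)
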
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - _headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - if source_if_tags is not None: - 
_headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if source_content_md5 is not None: - _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") - if copy_source_blob_properties is not None: - _headers["x-ms-copy-source-blob-properties"] = _SERIALIZER.header( - "copy_source_blob_properties", copy_source_blob_properties, "bool" - ) - if copy_source_authorization is not None: - _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( - "copy_source_authorization", copy_source_authorization, "str" - ) - if copy_source_tags is not None: - _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_stage_block_request( - url: str, - *, - block_id: str, - content_length: int, - content: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = 
_SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_stage_block_from_url_request( - url: str, - *, - block_id: str, - content_length: int, - source_url: str, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - lease_id: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") - if source_range is not None: - _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") - if source_content_md5 is not None: - _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") - if source_contentcrc64 is not None: - _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( - "source_contentcrc64", source_contentcrc64, "bytearray" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if 
encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if copy_source_authorization is not None: - _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( - "copy_source_authorization", copy_source_authorization, "str" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_commit_block_list_request( - url: str, - *, - content: Any, - timeout: Optional[int] = None, - blob_cache_control: Optional[str] = None, - blob_content_type: Optional[str] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", 
_headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - _headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if 
blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_get_block_list_request( - url: str, - *, - snapshot: Optional[str] = None, - list_type: Union[str, _models.BlockListType] = "committed", - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - _params["blocklisttype"] = _SERIALIZER.query("list_type", list_type, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class BlockBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`block_blob` attribute. 
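
Taken together, the stage/commit/get builders above implement the standard chunked block-blob upload. A sketch against the public v12 client; the connection string, container, and blob names are placeholders:

import base64
from azure.storage.blob import BlobBlock, BlobServiceClient

service = BlobServiceClient.from_connection_string("<connection-string>")  # placeholder
blob = service.get_blob_client("mycontainer", "big.bin")

chunks = [b"first chunk", b"second chunk"]
ids = [base64.b64encode(f"{i:08d}".encode()).decode() for i in range(len(chunks))]

for block_id, chunk in zip(ids, chunks):
    blob.stage_block(block_id=block_id, data=chunk)             # Put Block
blob.commit_block_list([BlobBlock(block_id=i) for i in ids])    # Put Block List
committed, _uncommitted = blob.get_block_list("committed")      # Get Block List
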
- """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def upload( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - transactional_content_crc64: Optional[bytes] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Block Blob operation updates the content of an existing block blob. Updating an - existing block blob overwrites any existing metadata on the blob. Partial updates are not - supported with Put Blob; the content of the existing blob is overwritten with the content of - the new blob. To perform a partial update of the content of a block blob, use the Put Block - List operation. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - 
_encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_upload_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - transactional_content_crc64=transactional_content_crc64, - blob_type=blob_type, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def 
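
The upload operation above deserializes a fixed set of response headers (ETag, x-ms-version-id, and friends). From the public client, the same headers can be observed with the azure-core raw_response_hook pipeline callback; a hedged sketch with a placeholder URL:

from azure.storage.blob import BlobClient

blob = BlobClient.from_blob_url(
    "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas>"  # placeholder
)

def on_response(pipeline_response):
    # Same headers the generated operation reads into response_headers.
    headers = pipeline_response.http_response.headers
    print(headers.get("ETag"), headers.get("x-ms-version-id"))

blob.upload_blob(b"data", overwrite=True, raw_response_hook=on_response)
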
put_blob_from_url( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - copy_source: str, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - blob_tags_string: Optional[str] = None, - copy_source_blob_properties: Optional[bool] = None, - copy_source_authorization: Optional[str] = None, - copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are - read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial - updates are not supported with Put Blob from URL; the content of an existing blob is - overwritten with the content of the new blob. To perform partial updates to a block blob’s - contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. - - :param content_length: The length of the request. Required. - :type content_length: int - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. Default value is None. - :type copy_source_blob_properties: bool - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be - copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and - "COPY". Default value is None. - :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. 
- :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_tags = source_modified_access_conditions.source_if_tags - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_put_blob_from_url_request( - url=self._config.url, - content_length=content_length, - copy_source=copy_source, - timeout=timeout, - transactional_content_md5=transactional_content_md5, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - 
encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_tags=_source_if_tags, - request_id_parameter=request_id_parameter, - source_content_md5=source_content_md5, - blob_tags_string=blob_tags_string, - copy_source_blob_properties=copy_source_blob_properties, - copy_source_authorization=copy_source_authorization, - copy_source_tags=copy_source_tags, - blob_type=blob_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def stage_block( # pylint: disable=inconsistent-return-statements - self, - block_id: str, - content_length: int, - body: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """The Stage Block operation creates a new block to be committed as part of a blob. - - :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the - string must be less than or equal to 64 bytes in size. 
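
The error_map in these operations turns service status codes into typed azure-core exceptions (401, 404, 409, and 304 above). A sketch of what that buys the caller; the client URL and source URL are placeholders:

from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.storage.blob import BlobClient

dest = BlobClient.from_blob_url(
    "https://myaccount.blob.core.windows.net/mycontainer/dest?<sas>"  # placeholder
)
try:
    dest.upload_blob_from_url("https://example.invalid/src")  # placeholder
except ResourceNotFoundError:
    pass  # 404, mapped by error_map
except HttpResponseError as err:
    raise  # other failures carry the deserialized StorageError as err.model
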
For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. Required. - :type block_id: str - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - _content = body - - _request = build_stage_block_request( - url=self._config.url, - block_id=block_id, - content_length=content_length, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - request_id_parameter=request_id_parameter, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - 
_request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def stage_block_from_url( # pylint: disable=inconsistent-return-statements - self, - block_id: str, - content_length: int, - source_url: str, - source_range: Optional[str] = None, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Stage Block operation creates a new block to be committed as part of a blob where the - contents are read from a URL. - - :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the - string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. Required. - :type block_id: str - :param content_length: The length of the request. Required. - :type content_length: int - :param source_url: Specify a URL to the copy source. Required. - :type source_url: str - :param source_range: Bytes of source data in the specified range. Default value is None. - :type source_range: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. Default value is None. - :type source_contentcrc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
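
The Content-MD5 / x-ms-content-crc64 round-trip handled above is what the public client's validate_content flag drives: the SDK sends a transactional MD5 with each Put Block and the service verifies it on receipt. A short sketch, placeholder URL as before:

import base64
from azure.storage.blob import BlobClient

blob = BlobClient.from_blob_url(
    "https://myaccount.blob.core.windows.net/mycontainer/big.bin?<sas>"  # placeholder
)
blob.stage_block(
    block_id=base64.b64encode(b"00000000").decode(),
    data=b"payload",
    validate_content=True,  # client computes and sends a transactional Content-MD5
)
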
- :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_stage_block_from_url_request( - url=self._config.url, - block_id=block_id, - content_length=content_length, - source_url=source_url, - source_range=source_range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def commit_block_list( # pylint: disable=inconsistent-return-statements - self, - blocks: _models.BlockLookupList, - timeout: Optional[int] = None, - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - metadata: Optional[Dict[str, str]] = None, - tier: Optional[Union[str, _models.AccessTierOptional]] = None, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Commit Block List operation writes a blob by specifying the list of block IDs that make up - the blob. In order to be written as part of a blob, a block must have been successfully written - to the server in a prior Put Block operation. You can call Put Block List to update a blob by - uploading only those blocks that have changed, then committing the new and existing blocks - together. You can do this by specifying whether to commit a block from the committed block list - or from the uncommitted block list, or to commit the most recently uploaded version of the - block, whichever list it may belong to. - - :param blocks: Blob Blocks. Required. - :type blocks: ~azure.storage.blob.models.BlockLookupList - :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. - :type tier: str or ~azure.storage.blob.models.AccessTierOptional - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) - - _request = build_commit_block_list_request( - url=self._config.url, - timeout=timeout, - blob_cache_control=_blob_cache_control, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - tier=tier, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - comp=comp, - 
content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_block_list( - self, - snapshot: Optional[str] = None, - list_type: Union[str, _models.BlockListType] = "committed", - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.BlockList: - """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a - block blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and - "all". Default value is "committed". - :type list_type: str or ~azure.storage.blob.models.BlockListType - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
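The block operations in this file compose into the usual stage/commit/inspect workflow: the commit operation above pairs with `get_block_list`, whose body continues below. A hedged sketch against the public `BlobClient` surface, with a placeholder connection string and names; the wrappers Base64-encode block ids before they reach this generated layer.

```python
from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>",            # placeholder
    container_name="my-container",
    blob_name="assembled-blob",
)

# Put Block: upload each chunk as an uncommitted block.
chunks = [b"hello ", b"block ", b"blob"]
ids = ["block-{:05d}".format(i) for i in range(len(chunks))]
for bid, chunk in zip(ids, chunks):
    blob.stage_block(block_id=bid, data=chunk)

# Put Block List: committing writes the blob from the staged blocks;
# only the ids named here become (or remain) part of the blob.
blob.commit_block_list([BlobBlock(block_id=bid) for bid in ids])

# Get Block List: retrieve committed and uncommitted blocks.
committed, uncommitted = blob.get_block_list("all")
print([b.id for b in committed], len(uncommitted))
```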
- :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: BlockList or the result of cls(response) - :rtype: ~azure.storage.blob.models.BlockList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) - - _lease_id = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_tags = modified_access_conditions.if_tags - - _request = build_get_block_list_request( - url=self._config.url, - snapshot=snapshot, - list_type=list_type, - timeout=timeout, - lease_id=_lease_id, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("BlockList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py deleted file mode 100644 index 719059977673..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py +++ /dev/null @@ -1,2628 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - url: str, - *, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - request_id_parameter: Optional[str] = None, - default_encryption_scope: Optional[str] = None, - prevent_encryption_scope_override: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if access is not None: - _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if default_encryption_scope is not None: - 
_headers["x-ms-default-encryption-scope"] = _SERIALIZER.header( - "default_encryption_scope", default_encryption_scope, "str" - ) - if prevent_encryption_scope_override is not None: - _headers["x-ms-deny-encryption-scope-override"] = _SERIALIZER.header( - "prevent_encryption_scope_override", prevent_encryption_scope_override, "bool" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_properties_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 
"rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_metadata_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - metadata: Optional[Dict[str, str]] = None, - if_modified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_access_policy_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", 
minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_access_policy_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if access is not None: - _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_restore_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - deleted_container_name: Optional[str] = None, - deleted_container_version: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["undelete"] = kwargs.pop("comp", 
_params.pop("comp", "undelete")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if deleted_container_name is not None: - _headers["x-ms-deleted-container-name"] = _SERIALIZER.header( - "deleted_container_name", deleted_container_name, "str" - ) - if deleted_container_version is not None: - _headers["x-ms-deleted-container-version"] = _SERIALIZER.header( - "deleted_container_version", deleted_container_version, "str" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_rename_request( - url: str, - *, - source_container_name: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - source_lease_id: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-source-container-name"] = _SERIALIZER.header("source_container_name", source_container_name, "str") - if source_lease_id is not None: - _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_submit_batch_request( - url: str, - *, - content_length: int, - content: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params 
= case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if multipart_content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_filter_blobs_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if where is not None: - _params["where"] = _SERIALIZER.query("where", where, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - if include is not None: - _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", 
request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_acquire_lease_request( - url: str, - *, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - if duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_release_lease_request( - url: str, - *, - lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - 
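The acquire builder above and the release/renew/break/change builders that follow encode the container lease sub-protocol: `comp=lease` plus an `x-ms-lease-action` header selecting the action. A minimal sketch of the same lifecycle via the public `ContainerClient`; the connection string, container name, and proposed lease id are placeholders.

```python
from azure.storage.blob import ContainerClient

container = ContainerClient.from_connection_string(
    "<connection-string>", "my-container"      # placeholders
)

# acquire: x-ms-lease-action=acquire with a bounded duration (15-60s or -1).
lease = container.acquire_lease(lease_duration=15)

# renew / change / break map onto the sibling request builders;
# release would end the lease explicitly via lease.release().
lease.renew()
lease.change(proposed_lease_id="7fd25bc2-0662-4ba6-a27f-62e0b0b06e3a")
lease.break_lease(lease_break_period=5)
```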
path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_renew_lease_request( - url: str, - *, - lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_break_lease_request( - url: str, - *, - timeout: Optional[int] = None, - 
break_period: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - if break_period is not None: - _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_change_lease_request( - url: str, - *, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # 
Construct headers - _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_blob_flat_segment_request( - url: str, - *, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if prefix is not None: - _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - if include is not None: - _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long - url: str, - *, - delimiter: str, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if prefix is not None: - _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") - _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - if include is not None: - _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class ContainerOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`container` attribute. 
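The two list-segment builders above differ only in the mandatory `delimiter` query parameter, which is what turns a flat listing into a hierarchical one. A sketch of both through the public `ContainerClient`, assuming a placeholder connection string and prefix.

```python
from azure.storage.blob import ContainerClient

container = ContainerClient.from_connection_string(
    "<connection-string>", "my-container"      # placeholders
)

# Flat segment (comp=list): one stream of every matching blob.
for blob in container.list_blobs(name_starts_with="logs/"):
    print(blob.name)

# Hierarchy segment (comp=list with delimiter): virtual directories
# surface as BlobPrefix items alongside the blobs at this level.
for item in container.walk_blobs(name_starts_with="logs/", delimiter="/"):
    print(item.name)
```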
- """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def create( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - request_id_parameter: Optional[str] = None, - container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, - **kwargs: Any - ) -> None: - """creates a new container under the specified account. If the container with the same name - already exists, the operation fails. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param access: Specifies whether data in the container may be accessed publicly and the level - of access. Known values are: "container" and "blob". Default value is None. - :type access: str or ~azure.storage.blob.models.PublicAccessType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. Default value is None. 
- :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _default_encryption_scope = None - _prevent_encryption_scope_override = None - if container_cpk_scope_info is not None: - _default_encryption_scope = container_cpk_scope_info.default_encryption_scope - _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - - _request = build_create_request( - url=self._config.url, - timeout=timeout, - metadata=metadata, - access=access, - request_id_parameter=request_id_parameter, - default_encryption_scope=_default_encryption_scope, - prevent_encryption_scope_override=_prevent_encryption_scope_override, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_properties( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> None: - """returns all user-defined metadata and system properties for the specified container. The data - returned does not include the container's list of blobs. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
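Callers normally reach this create operation through the hand-written client surface rather than invoking the generated ContainerOperations directly. A minimal sketch, assuming the public v12 azure.storage.blob package (account URL and credential are placeholders); the later sketches reuse this container_client:

from azure.storage.blob import ContainerClient

container_client = ContainerClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    container_name="my-container",
    credential="<account-key-or-sas>",  # placeholder
)
# Wraps the create operation above; a 409 surfaces as ResourceExistsError,
# matching the error_map.
container_client.create_container(metadata={"team": "storage"})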
- :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_properties_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-blob-public-access"] = self._deserialize( - "str", response.headers.get("x-ms-blob-public-access") - ) - response_headers["x-ms-has-immutability-policy"] = self._deserialize( - "bool", response.headers.get("x-ms-has-immutability-policy") - ) - response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) - response_headers["x-ms-default-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-default-encryption-scope") - ) - response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( - "bool", response.headers.get("x-ms-deny-encryption-scope-override") - ) - response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( - "bool", 
response.headers.get("x-ms-immutable-storage-with-versioning-enabled") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """operation marks the specified container for deletion. The container and any blobs contained - within it are later deleted during garbage collection. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - restype=restype, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_metadata( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - metadata: Optional[Dict[str, str]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """operation sets one or more user-defined name-value pairs for the specified container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - - _request = build_set_metadata_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - metadata=metadata, - if_modified_since=_if_modified_since, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_access_policy( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - **kwargs: Any - ) -> List[_models.SignedIdentifier]: - """gets the permissions for the specified container. The permissions indicate whether container - data may be accessed publicly. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. 
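Worth noting about set_metadata: the service replaces the container's metadata wholesale; it does not merge with existing pairs. A minimal sketch via the public wrapper:

# Any name-value pairs not present in this dict are dropped from the container.
container_client.set_container_metadata({"project": "demo", "env": "test"})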
- :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :return: list of SignedIdentifier or the result of cls(response) - :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) - - _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - - _request = build_get_access_policy_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-blob-public-access"] = self._deserialize( - "str", response.headers.get("x-ms-blob-public-access") - ) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def set_access_policy( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - access: Optional[Union[str, _models.PublicAccessType]] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - container_acl: Optional[List[_models.SignedIdentifier]] = None, - **kwargs: Any - ) -> None: - """sets the permissions for the specified container. The permissions indicate whether blobs in a - container may be accessed publicly. - - :param timeout: The timeout parameter is expressed in seconds. 
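The SignedIdentifier list deserialized by get_access_policy comes back through the public wrapper as a dict pairing the public-access level with the stored access policies. A sketch, assuming the v12 return shape:

policy = container_client.get_container_access_policy()
print(policy["public_access"])  # None, "blob", or "container"
for identifier in policy["signed_identifiers"]:
    print(identifier.id, identifier.access_policy)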
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param access: Specifies whether data in the container may be accessed publicly and the level - of access. Known values are: "container" and "blob". Default value is None. - :type access: str or ~azure.storage.blob.models.PublicAccessType - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param container_acl: the acls for the container. Default value is None. - :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} - if container_acl is not None: - _content = self._serialize.body( - container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt - ) - else: - _content = None - - _request = build_set_access_policy_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - access=access, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - 
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def restore( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - deleted_container_name: Optional[str] = None, - deleted_container_version: Optional[str] = None, - **kwargs: Any - ) -> None: - """Restores a previously-deleted container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. Default value is None. - :type deleted_container_name: str - :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. Default value is None. 
- :type deleted_container_version: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_restore_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - deleted_container_name=deleted_container_name, - deleted_container_version=deleted_container_version, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def rename( # pylint: disable=inconsistent-return-statements - self, - source_container_name: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - source_lease_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """Renames an existing container. - - :param source_container_name: Required. Specifies the name of the container to rename. - Required. - :type source_container_name: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. Default value is None. 
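restore maps to the account-level container soft-delete feature; the public entry point hangs off BlobServiceClient. A minimal sketch (soft delete must be enabled on the account, and the version string below is a placeholder of the kind returned by list_containers(include_deleted=True)):

from azure.storage.blob import BlobServiceClient

service = BlobServiceClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    credential="<account-key>",  # placeholder
)
restored_client = service.undelete_container(
    deleted_container_name="my-container",
    deleted_container_version="01D60F8BB59A4652",  # placeholder version id
)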
- :type source_lease_id: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_rename_request( - url=self._config.url, - source_container_name=source_container_name, - timeout=timeout, - request_id_parameter=request_id_parameter, - source_lease_id=source_lease_id, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def submit_batch( - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> Iterator[bytes]: - """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: Iterator[bytes] or the result of cls(response) - :rtype: Iterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: str = kwargs.pop( - "multipart_content_type", _headers.pop("Content-Type", "application/xml") - ) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _content = body - - _request = build_submit_batch_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - multipart_content_type=multipart_content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def filter_blobs( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any - ) -> _models.FilterBlobSegment: - """The Filter Blobs operation enables callers to list blobs in a container whose tags match a - given search expression. Filter blobs searches within the given container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
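submit_batch carries a multipart payload of sub-requests and streams the multipart response back. Callers usually go through a convenience wrapper; a minimal sketch using the batch-backed blob deletion helper:

# delete_blobs() builds the multipart batch body from the individual
# sub-requests and sends it through this container-scoped batch endpoint;
# partial failures can surface as PartialBatchErrorException.
container_client.delete_blobs("blob-1.txt", "blob-2.txt", "blob-3.txt")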
- :type request_id_parameter: str
- :param where: Filters the results to return only blobs whose tags match the
- specified expression. Default value is None.
- :type where: str
- :param marker: A string value that identifies the portion of the list of containers to be
- returned with the next listing operation. The operation returns the NextMarker value within the
- response body if the listing operation did not return all containers remaining to be listed
- with the current page. The NextMarker value can be used as the value for the marker parameter
- in a subsequent call to request the next page of list items. The marker value is opaque to the
- client. Default value is None.
- :type marker: str
- :param maxresults: Specifies the maximum number of containers to return. If the request does
- not specify maxresults, or specifies a value greater than 5000, the server will return up to
- 5000 items. Note that if the listing operation crosses a partition boundary, then the service
- will return a continuation token for retrieving the remainder of the results. For this reason,
- it is possible that the service will return fewer results than specified by maxresults, or than
- the default of 5000. Default value is None.
- :type maxresults: int
- :param include: Include this parameter to specify one or more datasets to include in the
- response. Default value is None.
- :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem]
- :return: FilterBlobSegment or the result of cls(response)
- :rtype: ~azure.storage.blob.models.FilterBlobSegment
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
- 401: ClientAuthenticationError,
- 404: ResourceNotFoundError,
- 409: ResourceExistsError,
- 304: ResourceNotModifiedError,
- }
- error_map.update(kwargs.pop("error_map", {}) or {})
-
- _headers = kwargs.pop("headers", {}) or {}
- _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
-
- restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
- comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs"))
- cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None)
-
- _request = build_filter_blobs_request(
- url=self._config.url,
- timeout=timeout,
- request_id_parameter=request_id_parameter,
- where=where,
- marker=marker,
- maxresults=maxresults,
- include=include,
- restype=restype,
- comp=comp,
- version=self._config.version,
- headers=_headers,
- params=_params,
- )
- _request.url = self._client.format_url(_request.url)
-
- _stream = False
- pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- _request, stream=_stream, **kwargs
- )
-
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
- raise HttpResponseError(response=response, model=error)
-
- response_headers = {}
- response_headers["x-ms-client-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-client-request-id")
- )
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
- response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
-
- deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def acquire_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a - lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. Default value is None. - :type duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
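filter_blobs is surfaced publicly as tag-based search scoped to the container, with the marker/NextMarker handshake handled by the pager. A minimal sketch; the string argument is the where= expression described above:

for blob in container_client.find_blobs_by_tags("\"status\" = 'processed'"):
    print(blob.name, blob.tags)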
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_acquire_lease_request( - url=self._config.url, - timeout=timeout, - duration=duration, - proposed_lease_id=proposed_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def release_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. 
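acquire maps to the container lease client on the public side; the x-ms-lease-id header deserialized above becomes the lease client's id. A minimal sketch:

# lease_duration is 15-60 seconds, or -1 for an infinite lease,
# per the duration constraints documented above.
lease = container_client.acquire_lease(lease_duration=15)
print(lease.id)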
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_release_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def renew_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes 
and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_renew_lease_request( - url=self._config.url, - lease_id=lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return 
cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def break_lease( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - break_period: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param break_period: For a break operation, proposed duration the lease should continue before - it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining on the lease is used. A new - lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a - fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease - breaks immediately. Default value is None. - :type break_period: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_break_lease_request( - url=self._config.url, - timeout=timeout, - break_period=break_period, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - 
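renew and release ride on the same lease client. A minimal sketch continuing from the acquire example:

lease.renew()    # restarts the lease clock for the original duration
lease.release()  # frees the lease so other clients can acquire immediately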
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def change_lease( # pylint: disable=inconsistent-return-statements - self, - lease_id: str, - proposed_lease_id: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """[Update] establishes and manages a lock on a container for delete operations. The lock duration - can be 15 to 60 seconds, or can be infinite. - - :param lease_id: Specifies the current lease ID on the resource. Required. - :type lease_id: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Required. - :type proposed_lease_id: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. 
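break maps to BlobLeaseClient.break_lease on the public side; the x-ms-lease-time header deserialized above comes back as the remaining break period. A minimal sketch:

# Returns the number of seconds until the break completes (0 if immediate).
seconds_left = lease.break_lease(lease_break_period=10)
print(seconds_left)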
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_change_lease_request( - url=self._config.url, - lease_id=lease_id, - proposed_lease_id=proposed_lease_id, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - comp=comp, - restype=restype, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def list_blob_flat_segment( - self, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListBlobsFlatSegmentResponse: - """[Update] The List Blobs operation returns a list of the blobs under the specified container. - - :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. Default value is None. 
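change swaps the active lease id for a caller-proposed one without releasing the lock. A minimal sketch:

import uuid

# Corresponds to action="change" with the proposed id sent in
# x-ms-proposed-lease-id; the lease client tracks the new id afterwards.
lease.change(proposed_lease_id=str(uuid.uuid4()))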
-        :type prefix: str
-        :param marker: A string value that identifies the portion of the list of blobs to be
-         returned with the next listing operation. The operation returns the NextMarker value within
-         the response body if the listing operation did not return all blobs remaining to be listed
-         with the current page. The NextMarker value can be used as the value for the marker
-         parameter in a subsequent call to request the next page of list items. The marker value is
-         opaque to the client. Default value is None.
-        :type marker: str
-        :param maxresults: Specifies the maximum number of blobs to return. If the request does
-         not specify maxresults, or specifies a value greater than 5000, the server will return up to
-         5000 items. Note that if the listing operation crosses a partition boundary, then the
-         service will return a continuation token for retrieving the remainder of the results. For
-         this reason, it is possible that the service will return fewer results than specified by
-         maxresults, or than the default of 5000. Default value is None.
-        :type maxresults: int
-        :param include: Include this parameter to specify one or more datasets to include in the
-         response. Default value is None.
-        :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem]
-        :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         Setting Timeouts for Blob Service Operations. Default value is None.
-        :type timeout: int
-        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
-         Default value is None.
-        :type request_id_parameter: str
-        :return: ListBlobsFlatSegmentResponse or the result of cls(response)
-        :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse
-        :raises ~azure.core.exceptions.HttpResponseError:
-        """
-        error_map: MutableMapping[int, Type[HttpResponseError]] = {
-            401: ClientAuthenticationError,
-            404: ResourceNotFoundError,
-            409: ResourceExistsError,
-            304: ResourceNotModifiedError,
-        }
-        error_map.update(kwargs.pop("error_map", {}) or {})
-
-        _headers = kwargs.pop("headers", {}) or {}
-        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
-
-        restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
-        comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
-        cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None)
-
-        _request = build_list_blob_flat_segment_request(
-            url=self._config.url,
-            prefix=prefix,
-            marker=marker,
-            maxresults=maxresults,
-            include=include,
-            timeout=timeout,
-            request_id_parameter=request_id_parameter,
-            restype=restype,
-            comp=comp,
-            version=self._config.version,
-            headers=_headers,
-            params=_params,
-        )
-        _request.url = self._client.format_url(_request.url)
-
-        _stream = False
-        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
-            _request, stream=_stream, **kwargs
-        )
-
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
-            raise HttpResponseError(response=response, model=error)
-
-        response_headers = {}
-        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
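-        # The headers below are echoed service diagnostics; the XML body is then
-        # deserialized into ListBlobsFlatSegmentResponse. A minimal sketch of driving
-        # this segmented listing by hand through the marker / NextMarker handshake
-        # described above (illustrative only; `gen_client` stands in for an
-        # already-constructed generated AzureBlobStorage client scoped to one container):
-        #
-        #     marker = None
-        #     while True:
-        #         page = gen_client.container.list_blob_flat_segment(marker=marker, maxresults=1000)
-        #         for blob in page.segment.blob_items:
-        #             ...  # each item carries the blob's name, properties, metadata, etc.
-        #         marker = page.next_marker
-        #         if not marker:  # an empty NextMarker means the listing is complete
-        #             break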
response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list_blob_hierarchy_segment( - self, - delimiter: str, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListBlobsHierarchySegmentResponse: - """[Update] The List Blobs operation returns a list of the blobs under the specified container. - - :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix - element in the response body that acts as a placeholder for all blobs whose names begin with - the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. Required. - :type delimiter: str - :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. Default value is None. - :type prefix: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. - :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: ListBlobsHierarchySegmentResponse or the result of cls(response) - :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_blob_hierarchy_segment_request( - url=self._config.url, - delimiter=delimiter, - prefix=prefix, - marker=marker, - maxresults=maxresults, - include=include, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py deleted file mode 100644 index a280a9f3048d..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py +++ /dev/null @@ -1,2185 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
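-# Every page-blob write in this module targets fixed 512-byte pages: offsets and
-# lengths carried in the x-ms-range header must be 512-aligned, and the blob size
-# itself must be a multiple of 512. A small illustrative helper (an assumption for
-# exposition, not part of the generated module) for building such a header value:
-#
-#     PAGE_SIZE = 512
-#
-#     def page_range_header(offset: int, length: int) -> str:
-#         if offset % PAGE_SIZE or length % PAGE_SIZE:
-#             raise ValueError("offset and length must be 512-byte aligned")
-#         return f"bytes={offset}-{offset + length - 1}"  # inclusive end: bytes=0-511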
-# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - url: str, - *, - content_length: int, - blob_content_length: int, - timeout: Optional[int] = None, - tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, - blob_content_type: Optional[str] = None, - blob_content_encoding: Optional[str] = None, - blob_content_language: Optional[str] = None, - blob_content_md5: Optional[bytes] = None, - blob_cache_control: Optional[str] = None, - metadata: Optional[Dict[str, str]] = None, - lease_id: Optional[str] = None, - blob_content_disposition: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if tier is not None: - _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") - if blob_content_type is not None: - _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", 
blob_content_type, "str") - if blob_content_encoding is not None: - _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( - "blob_content_encoding", blob_content_encoding, "str" - ) - if blob_content_language is not None: - _headers["x-ms-blob-content-language"] = _SERIALIZER.header( - "blob_content_language", blob_content_language, "str" - ) - if blob_content_md5 is not None: - _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") - if blob_cache_control is not None: - _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") - if metadata is not None: - _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if blob_content_disposition is not None: - _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( - "blob_content_disposition", blob_content_disposition, "str" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") - if blob_sequence_number is not None: - _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if blob_tags_string is not None: - _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") - if immutability_policy_expiry is not None: - _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( - "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" - ) - if immutability_policy_mode is not None: - _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( - "immutability_policy_mode", immutability_policy_mode, "str" - ) - if legal_hold is not None: - _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_upload_pages_request( - url: str, - *, - content_length: int, - 
content: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_sequence_number_less_than_or_equal_to: Optional[int] = None, - if_sequence_number_less_than: Optional[int] = None, - if_sequence_number_equal_to: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if transactional_content_md5 is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_md5", transactional_content_md5, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if range is not None: - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_sequence_number_less_than_or_equal_to is not None: - _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( - "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" - ) - if if_sequence_number_less_than is not None: - _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( - 
"if_sequence_number_less_than", if_sequence_number_less_than, "int" - ) - if if_sequence_number_equal_to is not None: - _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( - "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" - ) - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_clear_pages_request( - url: str, - *, - content_length: int, - timeout: Optional[int] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_sequence_number_less_than_or_equal_to: Optional[int] = None, - if_sequence_number_less_than: Optional[int] = None, - if_sequence_number_equal_to: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if range is not None: - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not 
None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_sequence_number_less_than_or_equal_to is not None: - _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( - "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" - ) - if if_sequence_number_less_than is not None: - _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( - "if_sequence_number_less_than", if_sequence_number_less_than, "int" - ) - if if_sequence_number_equal_to is not None: - _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( - "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" - ) - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_upload_pages_from_url_request( - url: str, - *, - source_url: str, - source_range: str, - content_length: int, - range: str, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - lease_id: Optional[str] = None, - if_sequence_number_less_than_or_equal_to: Optional[int] = None, - if_sequence_number_less_than: Optional[int] = None, - if_sequence_number_equal_to: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = 
kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") - _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") - _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") - if source_content_md5 is not None: - _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") - if source_contentcrc64 is not None: - _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( - "source_contentcrc64", source_contentcrc64, "bytearray" - ) - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_sequence_number_less_than_or_equal_to is not None: - _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( - "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" - ) - if if_sequence_number_less_than is not None: - _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( - "if_sequence_number_less_than", if_sequence_number_less_than, "int" - ) - if if_sequence_number_equal_to is not None: - _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( - "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" - ) - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if 
source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if source_if_match is not None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if copy_source_authorization is not None: - _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( - "copy_source_authorization", copy_source_authorization, "str" - ) - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_page_ranges_request( - url: str, - *, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - - # Construct headers - if range is not None: - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_page_ranges_diff_request( - url: str, - *, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - prevsnapshot: Optional[str] = None, - prev_snapshot_url: Optional[str] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if snapshot is not None: - _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if prevsnapshot is not None: - _params["prevsnapshot"] = _SERIALIZER.query("prevsnapshot", prevsnapshot, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - - # Construct headers - if prev_snapshot_url is not None: - _headers["x-ms-previous-snapshot-url"] = _SERIALIZER.header("prev_snapshot_url", prev_snapshot_url, "str") - if range is not None: - _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, 
headers=_headers, **kwargs) - - -def build_resize_request( - url: str, - *, - blob_content_length: int, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, - encryption_scope: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if encryption_scope is not None: - _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_sequence_number_request( - url: str, - *, - sequence_number_action: Union[str, _models.SequenceNumberActionType], - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = 
None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-sequence-number-action"] = _SERIALIZER.header( - "sequence_number_action", sequence_number_action, "str" - ) - if blob_sequence_number is not None: - _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_copy_incremental_request( - url: str, - *, - copy_source: str, - timeout: Optional[int] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_tags: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = 
_SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_tags is not None: - _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") - _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -class PageBlobOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`page_blob` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def create( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - blob_content_length: int, - timeout: Optional[int] = None, - tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, - metadata: Optional[Dict[str, str]] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - blob_tags_string: Optional[str] = None, - immutability_policy_expiry: Optional[datetime.datetime] = None, - immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, - legal_hold: Optional[bool] = None, - blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Create operation creates a new page blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. Required. - :type blob_content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param tier: Optional. Indicates the tier to be set on the page blob. 
Known values are: "P4", - "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. - :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier - :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. - If no name-value pairs are specified, the operation will copy the metadata from the source blob - or file to the destination blob. If one or more name-value pairs are specified, the destination - blob is created with the specified metadata, and metadata is not copied from the source blob or - file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming - rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. Default value is None. - :type metadata: dict[str, str] - :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled - value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. Default value is 0. - :type blob_sequence_number: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default - value is None. - :type blob_tags_string: str - :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy - is set to expire. Default value is None. - :type immutability_policy_expiry: ~datetime.datetime - :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. - Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. - :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode - :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. - :type legal_hold: bool - :param blob_http_headers: Parameter group. Default value is None. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _blob_content_type = None - _blob_content_encoding = None - _blob_content_language = None - _blob_content_md5 = None - _blob_cache_control = None - _lease_id = None - _blob_content_disposition = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_type = blob_http_headers.blob_content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_create_request( - url=self._config.url, - content_length=content_length, - blob_content_length=blob_content_length, - timeout=timeout, - tier=tier, - blob_content_type=_blob_content_type, - blob_content_encoding=_blob_content_encoding, - blob_content_language=_blob_content_language, - blob_content_md5=_blob_content_md5, - blob_cache_control=_blob_cache_control, - metadata=metadata, - lease_id=_lease_id, - blob_content_disposition=_blob_content_disposition, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_sequence_number=blob_sequence_number, - request_id_parameter=request_id_parameter, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - blob_type=blob_type, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def upload_pages( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - body: IO[bytes], - transactional_content_md5: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Pages operation writes a range of pages to a page blob. - - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. Default value is None. - :type transactional_content_md5: bytes - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. 
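The Upload Pages operation described above is surfaced publicly as `BlobClient.upload_page`. A minimal sketch, assuming an existing page blob and placeholder connection details; offsets and lengths must be 512-byte aligned, matching the range rules in the docstring:

import os
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # placeholder
    container_name="pages",
    blob_name="disk.vhd",
)

page = b"\x01" * 512  # writes must cover whole 512-byte pages
blob.upload_page(page, offset=0, length=len(page))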
- :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_upload_pages_request( - url=self._config.url, - content_length=content_length, - transactional_content_md5=transactional_content_md5, - transactional_content_crc64=transactional_content_crc64, - 
timeout=timeout, - range=range, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - page_write=page_write, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def clear_pages( # pylint: disable=inconsistent-return-statements - self, - content_length: int, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Clear Pages operation clears a set of pages from a page blob. - - :param content_length: The length of the request. 
Required. - :type content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. Default value is None. - :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = 
build_clear_pages_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - range=range, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - page_write=page_write, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def upload_pages_from_url( # pylint: disable=inconsistent-return-statements - self, - source_url: str, - source_range: str, - content_length: int, - range: str, - source_content_md5: Optional[bytes] = None, - source_contentcrc64: Optional[bytes] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - copy_source_authorization: Optional[str] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Upload Pages operation writes a range of pages to a page blob where the contents are read - from a URL. - - :param source_url: Specify a URL to the copy source. Required. 
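For the Clear Pages operation just above, the public counterpart is `BlobClient.clear_page`, which zeroes an aligned range without sending a request body (note the generated method sets `x-ms-page-write: clear` and no content). A short sketch with placeholder names:

import os
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # placeholder
    container_name="pages",
    blob_name="disk.vhd",
)

blob.clear_page(offset=0, length=512)  # zeroes the first 512-byte page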
- :type source_url: str - :param source_range: Bytes of source data in the specified range. The length of this range - should match the ContentLength header and x-ms-range/Range destination range header. Required. - :type source_range: str - :param content_length: The length of the request. Required. - :type content_length: int - :param range: The range of bytes to which the source range would be written. The range should - be 512 aligned and range-end is required. Required. - :type range: str - :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. Default value is None. - :type source_content_md5: bytes - :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. Default value is None. - :type source_contentcrc64: bytes - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid - OAuth access token to copy source. Default value is None. - :type copy_source_authorization: str - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param sequence_number_access_conditions: Parameter group. Default value is None. - :type sequence_number_access_conditions: - ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. 
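The URL-sourced variant described here is exposed as `BlobClient.upload_pages_from_url`; the destination and source offsets map onto the `range` and `source_range` headers above. A sketch assuming the source page blob is readable via a SAS URL (all values are placeholders):

import os
from azure.storage.blob import BlobClient

dest = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # placeholder
    container_name="pages",
    blob_name="copy.vhd",
)

# Placeholder source: a page blob readable anonymously or via SAS.
source_url = "https://<account>.blob.core.windows.net/pages/disk.vhd?<sas>"
dest.upload_pages_from_url(source_url, offset=0, length=512, source_offset=0)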
- :type source_modified_access_conditions: - ~azure.storage.blob.models.SourceModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) - page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _lease_id = None - _if_sequence_number_less_than_or_equal_to = None - _if_sequence_number_less_than = None - _if_sequence_number_equal_to = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if sequence_number_access_conditions is not None: - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_less_than_or_equal_to = ( - sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - ) - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - - _request = build_upload_pages_from_url_request( - url=self._config.url, - source_url=source_url, - source_range=source_range, - content_length=content_length, - range=range, - source_content_md5=source_content_md5, - source_contentcrc64=source_contentcrc64, - timeout=timeout, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - lease_id=_lease_id, - if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, - if_sequence_number_less_than=_if_sequence_number_less_than, - if_sequence_number_equal_to=_if_sequence_number_equal_to, - if_modified_since=_if_modified_since, - 
if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - request_id_parameter=request_id_parameter, - copy_source_authorization=copy_source_authorization, - comp=comp, - page_write=page_write, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_page_ranges( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.PageList: - """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot - of a page blob. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: Return only the bytes of the blob in the specified range. 
Default value is None. - :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: PageList or the result of cls(response) - :rtype: ~azure.storage.blob.models.PageList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - cls: ClsType[_models.PageList] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_page_ranges_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - range=range, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - marker=marker, - maxresults=maxresults, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("PageList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_page_ranges_diff( - self, - snapshot: Optional[str] = None, - timeout: Optional[int] = None, - prevsnapshot: Optional[str] = None, - prev_snapshot_url: Optional[str] = None, - range: Optional[str] = None, - request_id_parameter: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> _models.PageList: - """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that - were changed between target blob and previous snapshot. - - :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, - specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating - a Snapshot of a Blob.`. Default value is None. - :type snapshot: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a - DateTime value that specifies that the response will contain only pages that were changed - between target blob and previous snapshot. Changed pages include both updated and cleared - pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is - the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. Default value is None. - :type prevsnapshot: str - :param prev_snapshot_url: Optional. This header is only supported in service versions - 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The - response will only contain pages that were changed between the target blob and its previous - snapshot. Default value is None. - :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. Default value is None. 
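On the convenience layer, the Get Page Ranges operation above backs `BlobClient.get_page_ranges` (newer releases also offer `list_page_ranges`), which returns the valid ranges as start/end pairs rather than a raw `PageList`. A sketch with placeholder connection details:

import os
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # placeholder
    container_name="pages",
    blob_name="disk.vhd",
)

ranges, cleared = blob.get_page_ranges()
for r in ranges:
    print(f"valid pages: {r['start']}-{r['end']}")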
- :type range: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: PageList or the result of cls(response) - :rtype: ~azure.storage.blob.models.PageList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - cls: ClsType[_models.PageList] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_page_ranges_diff_request( - url=self._config.url, - snapshot=snapshot, - timeout=timeout, - prevsnapshot=prevsnapshot, - prev_snapshot_url=prev_snapshot_url, - range=range, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - marker=marker, - maxresults=maxresults, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["x-ms-blob-content-length"] = self._deserialize( - "int", response.headers.get("x-ms-blob-content-length") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("PageList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def resize( # pylint: disable=inconsistent-return-statements - self, - blob_content_length: int, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - cpk_scope_info: Optional[_models.CpkScopeInfo] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Resize the Blob. - - :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. Required. - :type blob_content_length: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. Default value is None. - :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. Default value is None. 
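The diff variant above is reached by passing `previous_snapshot_diff` to the same public method; only ranges changed since the given snapshot come back, split into updated and cleared lists. A sketch, assuming the snapshot was taken on the same blob earlier (placeholder setup as in the previous sketches):

import os
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # placeholder
    container_name="pages",
    blob_name="disk.vhd",
)

snap = blob.create_snapshot()  # dict carrying the snapshot timestamp
blob.upload_page(b"\x02" * 512, offset=512, length=512)
updated, cleared = blob.get_page_ranges(previous_snapshot_diff=snap)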
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - _encryption_scope = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if cpk_scope_info is not None: - _encryption_scope = cpk_scope_info.encryption_scope - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_resize_request( - url=self._config.url, - blob_content_length=blob_content_length, - timeout=timeout, - lease_id=_lease_id, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, - encryption_scope=_encryption_scope, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def update_sequence_number( # pylint: disable=inconsistent-return-statements - self, - sequence_number_action: Union[str, _models.SequenceNumberActionType], - timeout: Optional[int] = None, - blob_sequence_number: int = 0, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Update the sequence number of the blob. - - :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the - request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. Known values are: "max", "update", and "increment". - Required. - :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled - value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. Default value is 0. - :type blob_sequence_number: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_update_sequence_number_request( - url=self._config.url, - sequence_number_action=sequence_number_action, - timeout=timeout, - lease_id=_lease_id, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - blob_sequence_number=blob_sequence_number, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-blob-sequence-number"] = self._deserialize( - "int", response.headers.get("x-ms-blob-sequence-number") - ) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def copy_incremental( # pylint: disable=inconsistent-return-statements - self, - copy_source: str, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """The Copy Incremental operation copies a snapshot of the source 
page blob to a destination page - blob. The snapshot is copied such that only the differential changes between the previously - copied snapshot are transferred to the destination. The copied snapshots are complete copies of - the original snapshot and can be read or copied from as usual. This API is supported since REST - version 2016-05-31. - - :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of - up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it - would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. Required. - :type copy_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_copy_incremental_request( - url=self._config.url, - copy_source=copy_source, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - if_match=_if_match, - if_none_match=_if_none_match, - if_tags=_if_tags, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py deleted file mode 100644 index 2e2a84dc524d..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py +++ /dev/null @@ -1,1053 +0,0 @@ -# pylint: disable=too-many-lines,too-many-statements -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. 
import models as _models -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_set_properties_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_get_properties_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 
"str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_statistics_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_containers_segment_request( - url: str, - *, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if prefix is not None: - _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - if include is not None: - _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_user_delegation_key_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_submit_batch_request( - url: str, - *, - content_length: int, - content: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: 
Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") - if multipart_content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_filter_blobs_request( - url: str, - *, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04")) - accept = _headers.pop("Accept", "application/xml") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if where is not None: - _params["where"] = _SERIALIZER.query("where", where, "str") - if marker is not None: - _params["marker"] = _SERIALIZER.query("marker", marker, "str") - if maxresults is not None: - _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) - if include is not None: - _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - 
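The builder functions above only assemble an azure.core.rest.HttpRequest; nothing is sent until the client pipeline runs it. A minimal sketch of calling one directly, assuming placeholder account URL and filter values (none of these are part of the original change):

from azure.core.rest import HttpRequest

request: HttpRequest = build_list_containers_segment_request(
    url="https://myaccount.blob.core.windows.net",  # hypothetical account endpoint
    prefix="logs",        # only containers whose names start with "logs"
    maxresults=100,       # page size; the service caps this at 5000
)
assert request.method == "GET"
# comp=list and the optional filters are encoded on the query string, roughly:
#   https://myaccount.blob.core.windows.net?comp=list&prefix=logs&maxresults=100
# and the x-ms-version header defaults to "2024-08-04" unless overridden.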
- -class ServiceOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.blob.AzureBlobStorage`'s - :attr:`service` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def set_properties( # pylint: disable=inconsistent-return-statements - self, - storage_service_properties: _models.StorageServiceProperties, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets properties for a storage account's Blob service endpoint, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param storage_service_properties: The StorageService properties. Required. - :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) - - _request = build_set_properties_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def get_properties( - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> _models.StorageServiceProperties: - """gets the properties of a storage account's Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: StorageServiceProperties or the result of cls(response) - :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_properties_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_statistics( - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> _models.StorageServiceStats: - """Retrieves statistics related to replication for the Blob service. 
It is only available on the - secondary location endpoint when read-access geo-redundant replication is enabled for the - storage account. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: StorageServiceStats or the result of cls(response) - :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) - - _request = build_get_statistics_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list_containers_segment( - self, - prefix: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListContainersSegmentResponse: - """The List Containers Segment operation returns a list of the containers under the specified - account. - - :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. Default value is None. 
- :type prefix: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. Default value is None. - :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: ListContainersSegmentResponse or the result of cls(response) - :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_containers_segment_request( - url=self._config.url, - prefix=prefix, - marker=marker, - maxresults=maxresults, - include=include, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", 
response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_user_delegation_key( - self, - key_info: _models.KeyInfo, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.UserDelegationKey: - """Retrieves a user delegation key for the Blob service. This is only a valid operation when using - bearer token authentication. - - :param key_info: Key information. Required. - :type key_info: ~azure.storage.blob.models.KeyInfo - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: UserDelegationKey or the result of cls(response) - :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) - cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) - - _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) - - _request = build_get_user_delegation_key_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = 
self._deserialize("UserDelegationKey", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_account_info( # pylint: disable=inconsistent-return-statements - self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any - ) -> None: - """Returns the sku name and account kind. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_account_info_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) - response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) - response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def submit_batch( - self, - content_length: int, - body: IO[bytes], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> Iterator[bytes]: - """The Batch operation allows multiple API calls to be embedded into a single HTTP request. 
- - :param content_length: The length of the request. Required. - :type content_length: int - :param body: Initial data. Required. - :type body: IO[bytes] - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: Iterator[bytes] or the result of cls(response) - :rtype: Iterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) - multipart_content_type: str = kwargs.pop( - "multipart_content_type", _headers.pop("Content-Type", "application/xml") - ) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _content = body - - _request = build_submit_batch_request( - url=self._config.url, - content_length=content_length, - timeout=timeout, - request_id_parameter=request_id_parameter, - comp=comp, - multipart_content_type=multipart_content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def filter_blobs( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - where: Optional[str] = None, - marker: Optional[str] = None, - maxresults: Optional[int] = None, - include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, - **kwargs: Any - ) -> _models.FilterBlobSegment: - """The Filter Blobs operation enables callers to list blobs across all containers whose tags match - a given search expression. 
Filter blobs searches across all containers within a storage - account but can be scoped within the expression to a single container. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param where: Filters the results to return only to return only blobs whose tags match the - specified expression. Default value is None. - :type where: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param maxresults: Specifies the maximum number of containers to return. If the request does - not specify maxresults, or specifies a value greater than 5000, the server will return up to - 5000 items. Note that if the listing operation crosses a partition boundary, then the service - will return a continuation token for retrieving the remainder of the results. For this reason, - it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. Default value is None. - :type maxresults: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. 
- :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] - :return: FilterBlobSegment or the result of cls(response) - :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) - - _request = build_filter_blobs_request( - url=self._config.url, - timeout=timeout, - request_id_parameter=request_id_parameter, - where=where, - marker=marker, - maxresults=maxresults, - include=include, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/py.typed b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_lease.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_lease.py deleted file mode 100644 index b8b5684d7c23..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_lease.py +++ /dev/null @@ -1,341 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
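For context on what this deletion removes: per the class docstring above, these operations were reached through the generated client's service attribute, not instantiated directly. A hedged sketch of pre-deletion usage; the endpoint is a placeholder and credential wiring is omitted, so this is illustrative only:

from azure.storage.blob._generated import AzureBlobStorage

client = AzureBlobStorage(url="https://myaccount.blob.core.windows.net")  # hypothetical endpoint
props = client.service.get_properties()  # -> StorageServiceProperties
# filter_blobs takes a SQL-like where clause over blob tags:
segment = client.service.filter_blobs(where="\"project\"='demo'")
for item in segment.blobs:
    print(item.name, item.container_name)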
-# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -import uuid - -from typing import Any, Optional, Union, TYPE_CHECKING - -from azure.core.exceptions import HttpResponseError -from azure.core.tracing.decorator import distributed_trace - -from ._shared.response_handlers import process_storage_error, return_response_headers -from ._serialize import get_modify_conditions - -if TYPE_CHECKING: - from azure.storage.blob import BlobClient, ContainerClient - from datetime import datetime - - -class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword - """Creates a new BlobLeaseClient. - - This client provides lease operations on a BlobClient or ContainerClient. - :param client: The client of the blob or container to lease. - :type client: Union[BlobClient, ContainerClient] - :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be - specified in order to acquire a new lease, or break one. - :type lease_id: Optional[str] - """ - - id: str - """The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired.""" - etag: Optional[str] - """The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified.""" - last_modified: Optional["datetime"] - """The last modified timestamp of the lease currently being maintained. - This will be `None` if no lease has yet been acquired or modified.""" - - def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs - self, client: Union["BlobClient", "ContainerClient"], - lease_id: Optional[str] = None - ) -> None: - self.id = lease_id or str(uuid.uuid4()) - self.last_modified = None - self.etag = None - if hasattr(client, 'blob_name'): - self._client = client._client.blob - elif hasattr(client, 'container_name'): - self._client = client._client.container - else: - raise TypeError("Lease must use either BlobClient or ContainerClient.") - - def __enter__(self): - return self - - def __exit__(self, *args): - self.release() - - @distributed_trace - def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None: - """Requests a new lease. - - If the container does not have an active lease, the Blob service creates a - lease on the container and returns a new lease ID. - - :param int lease_duration: - Specifies the duration of the lease, in seconds, or negative one - (-1) for a lease that never expires. A non-infinite lease can be - between 15 and 60 seconds. A lease duration cannot be changed - using renew or change. Default is -1 (infinite lease). - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. 
- :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - """ - mod_conditions = get_modify_conditions(kwargs) - try: - response: Any = self._client.acquire_lease( - timeout=kwargs.pop('timeout', None), - duration=lease_duration, - proposed_lease_id=self.id, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - self.id = response.get('lease_id') - self.last_modified = response.get('last_modified') - self.etag = response.get('etag') - - @distributed_trace - def renew(self, **kwargs: Any) -> None: - """Renews the lease. - - The lease can be renewed if the lease ID specified in the - lease client matches that associated with the container or blob. Note that - the lease may be renewed even if it has expired as long as the container - or blob has not been leased again since the expiration of that lease. When you - renew a lease, the lease duration clock resets. - - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. 
- :return: None - """ - mod_conditions = get_modify_conditions(kwargs) - try: - response: Any = self._client.renew_lease( - lease_id=self.id, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - self.etag = response.get('etag') - self.id = response.get('lease_id') - self.last_modified = response.get('last_modified') - - @distributed_trace - def release(self, **kwargs: Any) -> None: - """Release the lease. - - The lease may be released if the client lease id specified matches - that associated with the container or blob. Releasing the lease allows another client - to immediately acquire the lease for the container or blob as soon as the release is complete. - - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: None - """ - mod_conditions = get_modify_conditions(kwargs) - try: - response: Any = self._client.release_lease( - lease_id=self.id, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - self.etag = response.get('etag') - self.id = response.get('lease_id') - self.last_modified = response.get('last_modified') - - @distributed_trace - def change(self, proposed_lease_id: str, **kwargs: Any) -> None: - """Change the lease ID of an active lease. - - :param str proposed_lease_id: - Proposed lease ID, in a GUID string format. The Blob service returns 400 - (Invalid request) if the proposed lease ID is not in the correct format. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. 
- Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: None - """ - mod_conditions = get_modify_conditions(kwargs) - try: - response: Any = self._client.change_lease( - lease_id=self.id, - proposed_lease_id=proposed_lease_id, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - self.etag = response.get('etag') - self.id = response.get('lease_id') - self.last_modified = response.get('last_modified') - - @distributed_trace - def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int: - """Break the lease, if the container or blob has an active lease. - - Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; - the request is not required to specify a matching lease ID. When a lease - is broken, the lease break period is allowed to elapse, during which time - no lease operation except break and release can be performed on the container or blob. - When a lease is successfully broken, the response indicates the interval - in seconds until a new lease can be acquired. - - :param int lease_break_period: - This is the proposed duration of seconds that the lease - should continue before it is broken, between 0 and 60 seconds. This - break period is only used if it is shorter than the time remaining - on the lease. If longer, the time remaining on the lease is used. - A new lease will not be available before the break period has - expired, but the lease may be held for longer than the break - period. If this header does not appear with a break - operation, a fixed-duration lease breaks after the remaining lease - period elapses, and an infinite lease breaks immediately. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. 
Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: Approximate time remaining in the lease period, in seconds. - :rtype: int - """ - mod_conditions = get_modify_conditions(kwargs) - try: - response = self._client.break_lease( - timeout=kwargs.pop('timeout', None), - break_period=lease_break_period, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - return response.get('lease_time') # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_list_blobs_helper.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_list_blobs_helper.py deleted file mode 100644 index 84a6159651b9..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_list_blobs_helper.py +++ /dev/null @@ -1,328 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
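The BlobLeaseClient removed above wraps a BlobClient or ContainerClient and acts as its own context manager (release() is called on exit). A minimal sketch of pre-deletion usage, assuming a blob already exists; the connection string and names are placeholders:

from azure.storage.blob import BlobClient, BlobLeaseClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="report.csv")
lease = BlobLeaseClient(blob)
lease.acquire(lease_duration=15)  # 15-60 seconds, or -1 for an infinite lease
try:
    # operations on a leased blob must present the lease
    blob.set_blob_metadata({"locked": "true"}, lease=lease)
finally:
    lease.release()  # lets another client acquire the lease immediately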
-# -------------------------------------------------------------------------- - -from typing import Any, Callable, cast, List, Optional, Tuple, Union -from urllib.parse import unquote - -from azure.core.exceptions import HttpResponseError -from azure.core.paging import ItemPaged, PageIterator - -from ._deserialize import ( - get_blob_properties_from_generated_code, - load_many_xml_nodes, - load_xml_int, - load_xml_string, - parse_tags -) -from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem -from ._generated._serialization import Deserializer -from ._models import BlobProperties, FilteredBlob -from ._shared.models import DictMixin -from ._shared.response_handlers import ( - process_storage_error, - return_context_and_deserialized, - return_raw_deserialized -) - - -class IgnoreListBlobsDeserializer(Deserializer): - def __call__(self, target_obj, response_data, content_type=None): # pylint: disable=inconsistent-return-statements - if target_obj == "ListBlobsFlatSegmentResponse": - return None - super().__call__(target_obj, response_data, content_type) - - -class BlobPropertiesPaged(PageIterator): - """An Iterable of Blob properties.""" - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - container: Optional[str] - """The container that the blobs are listed from.""" - delimiter: Optional[str] - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - - def __init__( - self, command: Callable, - container: str, - prefix: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - delimiter: Optional[str] = None, - location_mode: Optional[str] = None, - ) -> None: - super(BlobPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.prefix = prefix - self.marker = None - self.results_per_page = results_per_page - self.container = container - self.delimiter = delimiter - self.current_page = None - self.location_mode = location_mode - - def _get_next_cb(self, continuation_token): - try: - return self._command( - prefix=self.prefix, - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = cast(Tuple[Optional[str], Any], get_next_return) - self.service_endpoint = self._response.service_endpoint - self.prefix = self._response.prefix - self.marker = self._response.marker - self.results_per_page = self._response.max_results - self.container = self._response.container_name - self.current_page = [self._build_item(item) for item in 
self._response.segment.blob_items] - - return self._response.next_marker or None, self.current_page - - def _build_item(self, item: Union[BlobItemInternal, BlobProperties]) -> BlobProperties: - if isinstance(item, BlobProperties): - return item - if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container # type: ignore [assignment] - return blob - return item - - -class BlobNamesPaged(PageIterator): - """An Iterable of Blob names.""" - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of blobs to retrieve per call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - container: Optional[str] - """The container that the blobs are listed from.""" - delimiter: Optional[str] - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - - def __init__( - self, command: Callable, - container: Optional[str] = None, - prefix: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - location_mode: Optional[str] = None - ) -> None: - super(BlobNamesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.prefix = prefix - self.marker = None - self.results_per_page = results_per_page - self.container = container - self.current_page = None - self.location_mode = location_mode - - def _get_next_cb(self, continuation_token): - try: - return self._command( - prefix=self.prefix, - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_raw_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.get('ServiceEndpoint') - self.prefix = load_xml_string(self._response, 'Prefix') - self.marker = load_xml_string(self._response, 'Marker') - self.results_per_page = load_xml_int(self._response, 'MaxResults') - self.container = self._response.get('ContainerName') - - blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') - self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] - - next_marker = load_xml_string(self._response, 'NextMarker') - return next_marker or None, self.current_page - - -class BlobPrefixPaged(BlobPropertiesPaged): - def __init__(self, *args, **kwargs): - super(BlobPrefixPaged, self).__init__(*args, **kwargs) - self.name = self.prefix - - def _extract_data_cb(self, get_next_return): - continuation_token, _ = super(BlobPrefixPaged, self)._extract_data_cb(get_next_return) - self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items - self.current_page = [self._build_item(item) for item in self.current_page] - 
self.delimiter = self._response.delimiter - - return continuation_token, self.current_page - - def _build_item(self, item): - item = super(BlobPrefixPaged, self)._build_item(item) - if isinstance(item, GenBlobPrefix): - if item.name.encoded: - name = unquote(item.name.content) - else: - name = item.name.content - return BlobPrefix( - self._command, - container=self.container, - prefix=name, - results_per_page=self.results_per_page, - location_mode=self.location_mode) - return item - - -class BlobPrefix(ItemPaged, DictMixin): - """An Iterable of Blob properties. - - Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory.""" - - name: str - """The prefix, or "directory name" of the blob.""" - service_endpoint: Optional[str] - """The service URL.""" - prefix: str - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - next_marker: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: str - """The location mode being used to list results. The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - delimiter: str - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - container: str - """The name of the container.""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) - self.name = kwargs.get('prefix') # type: ignore [assignment] - self.prefix = kwargs.get('prefix') # type: ignore [assignment] - self.results_per_page = kwargs.get('results_per_page') - self.container = kwargs.get('container') # type: ignore [assignment] - self.delimiter = kwargs.get('delimiter') # type: ignore [assignment] - self.location_mode = kwargs.get('location_mode') # type: ignore [assignment] - - -class FilteredBlobPaged(PageIterator): - """An Iterable of Blob properties.""" - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. 
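`BlobPrefix` above is the virtual-directory wrapper that `walk_blobs` yields when a delimiter is supplied; because it is itself an `ItemPaged`, recursing into it pages lazily. A sketch of the traversal pattern this (now-deleted) helper supported under the pre-migration package layout, assuming an existing `ContainerClient` named `container`:

```python
# Sketch: hierarchy walk over a container; `container` is assumed to be
# an existing azure.storage.blob.ContainerClient (pre-migration API).
from azure.storage.blob import BlobPrefix

def walk(container, prefix: str = "", indent: int = 0) -> None:
    for item in container.walk_blobs(name_starts_with=prefix, delimiter="/"):
        if isinstance(item, BlobPrefix):
            print(" " * indent + item.name)  # virtual directory
            walk(container, prefix=item.name, indent=indent + 2)
        else:
            print(" " * indent + item.name)  # a BlobProperties entry
```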
The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - command: Callable - """Function to retrieve the next page of items.""" - container: Optional[str] - """The name of the container.""" - - def __init__( - self, command: Callable, - container: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - location_mode: Optional[str] = None - ) -> None: - super(FilteredBlobPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.marker = continuation_token - self.results_per_page = results_per_page - self.container = container - self.current_page = None - self.location_mode = location_mode - - def _get_next_cb(self, continuation_token): - try: - return self._command( - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.service_endpoint - self.marker = self._response.next_marker - self.current_page = [self._build_item(item) for item in self._response.blobs] - - return self._response.next_marker or None, self.current_page - - @staticmethod - def _build_item(item): - if isinstance(item, FilterBlobItem): - tags = parse_tags(item.tags) - blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) - return blob - return item diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_model_base.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_model_base.py new file mode 100644 index 000000000000..12ad7f29c71e --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_model_base.py @@ -0,0 +1,1158 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, arguments-differ, signature-differs, broad-except, too-many-lines + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO 8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + return self._data.items() + + def 
get(self, key: str, default: typing.Any = None) -> typing.Any: + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + return self._data.popitem() + + def clear(self) -> None: + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + 
rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is an array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # a wrapped element could be a normal property or an array; it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # remaining elements are additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual TypeErrors only report the first wrong keyword arg they see, so we follow that.
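Before the constructor's keyword-argument error path continues below, it is worth looping back to `SdkJSONEncoder` from the top of this new module: its `default` hook is what lets a plain `json.dumps` call handle the wire types this file serializes. A sketch; the import path is the private module this diff adds, shown only to illustrate the machinery:

```python
# Sketch: json.dumps with SdkJSONEncoder handles datetimes, timedeltas,
# and bytes via the default() hook shown earlier in this module.
import json
from datetime import datetime, timedelta, timezone
from azure.storage.blob._model_base import SdkJSONEncoder  # private module

payload = {
    "when": datetime(2024, 5, 1, tzinfo=timezone.utc),  # -> "2024-05-01T00:00:00Z"
    "ttl": timedelta(hours=1, minutes=30),              # -> "PT01H30M00S"
    "data": b"\x00\x01",                                # -> "AAE=" (base64)
}
print(json.dumps(payload, cls=SdkJSONEncoder))
```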
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # pylint: disable=unused-argument + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") # pylint: disable=no-member + for k, v in mro_class.__annotations__.items() # pylint: disable=no-member + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) # pylint: disable=no-value-for-parameter + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): # pylint: disable=no-member + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore # pylint: disable=no-member + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): # pylint: disable=no-member + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be serialized to JSON using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties.
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=R0911, R0915, R0912 + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? 
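With `__new__` binding each `rest_field` descriptor to its wire name and the mutable-mapping base keeping the raw payload reachable by key, a model built on this base gets dual attribute/dict access. A sketch with an invented `Pet` model (hypothetical, not a type from this diff; the private import is for illustration only), after which the annotation dispatch below resumes with the forward-reference branch:

```python
# Sketch: `Pet` is hypothetical; it only illustrates rest_field + Model.
from typing import Optional
from azure.storage.blob._model_base import Model, rest_field  # private module

class Pet(Model):
    name: str = rest_field()                                  # wire name defaults to "name"
    created_on: Optional[str] = rest_field(name="createdOn")  # camelCase on the wire

p = Pet(name="Rex", created_on="2024-05-01")
assert p.name == "Rex"                  # attribute view (deserialized)
assert p["createdOn"] == "2024-05-01"   # dict view keyed by REST name
assert p.as_dict() == {"name": "Rex", "createdOn": "2024-05-01"}
```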
+ if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]]; we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering: we make `string` the last deserialization option, because it is often the most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if
deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + 
xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + 
"prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py deleted file mode 100644 index a6af589b2c82..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py +++ /dev/null @@ -1,1506 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# pylint: disable=too-few-public-methods, too-many-instance-attributes -# pylint: disable=super-init-not-called, too-many-lines - -from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Union, TYPE_CHECKING - -from azure.core import CaseInsensitiveEnumMeta -from azure.core.paging import PageIterator -from azure.core.exceptions import HttpResponseError - -from ._shared import decode_base64_to_bytes -from ._shared.response_handlers import return_context_and_deserialized, process_storage_error -from ._shared.models import DictMixin, get_enum_value -from ._generated.models import AccessPolicy as GenAccessPolicy -from ._generated.models import ArrowField -from ._generated.models import CorsRule as GeneratedCorsRule -from ._generated.models import Logging as GeneratedLogging -from ._generated.models import Metrics as GeneratedMetrics -from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy -from ._generated.models import StaticWebsite as GeneratedStaticWebsite - -if TYPE_CHECKING: - from datetime import datetime - from ._generated.models import PageList - -# Parse a generated PageList into a single list of PageRange sorted by start. -def parse_page_list(page_list: "PageList") -> List["PageRange"]: - - page_ranges = page_list.page_range - clear_ranges = page_list.clear_range - - if page_ranges is None: - raise ValueError("PageList's 'page_range' is malformed or None.") - if clear_ranges is None: - raise ValueError("PageList's 'clear_ranges' is malformed or None.") - - ranges = [] - p_i, c_i = 0, 0 - - # Combine page ranges and clear ranges into single list, sorted by start - while p_i < len(page_ranges) and c_i < len(clear_ranges): - p, c = page_ranges[p_i], clear_ranges[c_i] - - if p.start < c.start: - ranges.append( - PageRange(start=p.start, end=p.end, cleared=False) - ) - p_i += 1 - else: - ranges.append( - PageRange(start=c.start, end=c.end, cleared=True) - ) - c_i += 1 - - # Grab remaining elements in either list - ranges += [PageRange(start=r.start, end=r.end, cleared=False) for r in page_ranges[p_i:]] - ranges += [PageRange(start=r.start, end=r.end, cleared=True) for r in clear_ranges[c_i:]] - - return ranges - - -class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - - BLOCKBLOB = "BlockBlob" - PAGEBLOB = "PageBlob" - APPENDBLOB = "AppendBlob" - - -class BlockState(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Block blob block types.""" - - COMMITTED = 'Committed' #: Committed blocks. - LATEST = 'Latest' #: Latest blocks. - UNCOMMITTED = 'Uncommitted' #: Uncommitted blocks. - - -class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ - Specifies the blob tier to set the blob to. This is only applicable for - block blobs on standard storage accounts. - """ - - ARCHIVE = 'Archive' #: Archive - COOL = 'Cool' #: Cool - COLD = 'Cold' #: Cold - HOT = 'Hot' #: Hot - - -class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ - Specifies the page blob tier to set the blob to. This is only applicable to page - blobs on premium storage accounts. Please take a look at: - https://docs.microsoft.com/en-us/azure/storage/storage-premium-storage#scalability-and-performance-targets - for detailed information on the corresponding IOPS and throughput per PageBlobTier. 
- """ - - P4 = 'P4' #: P4 Tier - P6 = 'P6' #: P6 Tier - P10 = 'P10' #: P10 Tier - P15 = 'P15' #: P15 Tier - P20 = 'P20' #: P20 Tier - P30 = 'P30' #: P30 Tier - P40 = 'P40' #: P40 Tier - P50 = 'P50' #: P50 Tier - P60 = 'P60' #: P60 Tier - - -class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specifies the quick query input/output dialect.""" - - DELIMITEDTEXT = 'DelimitedTextDialect' - DELIMITEDJSON = 'DelimitedJsonDialect' - PARQUET = 'ParquetDialect' - - -class SequenceNumberAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Sequence number actions.""" - - INCREMENT = 'increment' - """ - Increments the value of the sequence number by 1. If specifying this option, - do not include the x-ms-blob-sequence-number header. - """ - - MAX = 'max' - """ - Sets the sequence number to be the higher of the value included with the - request and the value currently stored for the blob. - """ - - UPDATE = 'update' - """Sets the sequence number to the value included with the request.""" - - -class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ - Specifies whether data in the container may be accessed publicly and the level of access. - """ - - OFF = 'off' - """ - Specifies that there is no public read access for both the container and blobs within the container. - Clients cannot enumerate the containers within the storage account as well as the blobs within the container. - """ - - BLOB = 'blob' - """ - Specifies public read access for blobs. Blob data within this container can be read - via anonymous request, but container data is not available. Clients cannot enumerate - blobs within the container via anonymous request. - """ - - CONTAINER = 'container' - """ - Specifies full public read access for container and blob data. Clients can enumerate - blobs within the container via anonymous request, but cannot enumerate containers - within the storage account. - """ - - -class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ - Specifies the immutability policy mode to set on the blob. - "Mutable" can only be returned by service, don't set to "Mutable". - """ - - UNLOCKED = "Unlocked" - LOCKED = "Locked" - MUTABLE = "Mutable" - - -class RetentionPolicy(GeneratedRetentionPolicy): - """The retention policy which determines how long the associated data should - persist. - - :param bool enabled: - Indicates whether a retention policy is enabled for the storage service. - The default value is False. - :param Optional[int] days: - Indicates the number of days that metrics or logging or - soft-deleted data should be retained. All data older than this value will - be deleted. If enabled=True, the number of days must be specified. - """ - - enabled: bool = False - days: Optional[int] = None - - def __init__(self, enabled: bool = False, days: Optional[int] = None) -> None: - super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) - if self.enabled and (self.days is None): - raise ValueError("If policy is enabled, 'days' must be specified.") - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - enabled=generated.enabled, - days=generated.days, - ) - - -class BlobAnalyticsLogging(GeneratedLogging): - """Azure Analytics Logging settings. - - :keyword str version: - The version of Storage Analytics to configure. The default value is 1.0. - :keyword bool delete: - Indicates whether all delete requests should be logged. The default value is `False`. 
- :keyword bool read: - Indicates whether all read requests should be logged. The default value is `False`. - :keyword bool write: - Indicates whether all write requests should be logged. The default value is `False`. - :keyword ~azure.storage.blob.RetentionPolicy retention_policy: - Determines how long the associated data should persist. If not specified the retention - policy will be disabled by default. - """ - - version: str = '1.0' - """The version of Storage Analytics to configure.""" - delete: bool = False - """Indicates whether all delete requests should be logged.""" - read: bool = False - """Indicates whether all read requests should be logged.""" - write: bool = False - """Indicates whether all write requests should be logged.""" - retention_policy: RetentionPolicy = RetentionPolicy() - """Determines how long the associated data should persist.""" - - def __init__(self, **kwargs: Any) -> None: - self.version = kwargs.get('version', '1.0') - self.delete = kwargs.get('delete', False) - self.read = kwargs.get('read', False) - self.write = kwargs.get('write', False) - self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - version=generated.version, - delete=generated.delete, - read=generated.read, - write=generated.write, - retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access - ) - - -class Metrics(GeneratedMetrics): - """A summary of request statistics grouped by API in hour or minute aggregates - for blobs. - - :keyword str version: - The version of Storage Analytics to configure. The default value is 1.0. - :keyword bool enabled: - Indicates whether metrics are enabled for the Blob service. - The default value is `False`. - :keyword bool include_apis: - Indicates whether metrics should generate summary statistics for called API operations. - :keyword ~azure.storage.blob.RetentionPolicy retention_policy: - Determines how long the associated data should persist. If not specified the retention - policy will be disabled by default. - """ - - version: str = '1.0' - """The version of Storage Analytics to configure.""" - enabled: bool = False - """Indicates whether metrics are enabled for the Blob service.""" - include_apis: Optional[bool] - """Indicates whether metrics should generate summary statistics for called API operations.""" - retention_policy: RetentionPolicy = RetentionPolicy() - """Determines how long the associated data should persist.""" - - def __init__(self, **kwargs: Any) -> None: - self.version = kwargs.get('version', '1.0') - self.enabled = kwargs.get('enabled', False) - self.include_apis = kwargs.get('include_apis') - self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - version=generated.version, - enabled=generated.enabled, - include_apis=generated.include_apis, - retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access - ) - - -class StaticWebsite(GeneratedStaticWebsite): - """The properties that enable an account to host a static website. - - :keyword bool enabled: - Indicates whether this account is hosting a static website. - The default value is `False`. - :keyword str index_document: - The default name of the index page under each directory. 
- :keyword str error_document404_path: - The absolute path of the custom 404 page. - :keyword str default_index_document_path: - Absolute path of the default index page. - """ - - enabled: bool = False - """Indicates whether this account is hosting a static website.""" - index_document: Optional[str] - """The default name of the index page under each directory.""" - error_document404_path: Optional[str] - """The absolute path of the custom 404 page.""" - default_index_document_path: Optional[str] - """Absolute path of the default index page.""" - - def __init__(self, **kwargs: Any) -> None: - self.enabled = kwargs.get('enabled', False) - if self.enabled: - self.index_document = kwargs.get('index_document') - self.error_document404_path = kwargs.get('error_document404_path') - self.default_index_document_path = kwargs.get('default_index_document_path') - else: - self.index_document = None - self.error_document404_path = None - self.default_index_document_path = None - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - enabled=generated.enabled, - index_document=generated.index_document, - error_document404_path=generated.error_document404_path, - default_index_document_path=generated.default_index_document_path - ) - - -class CorsRule(GeneratedCorsRule): - """CORS is an HTTP feature that enables a web application running under one - domain to access resources in another domain. Web browsers implement a - security restriction known as same-origin policy that prevents a web page - from calling APIs in a different domain; CORS provides a secure way to - allow one domain (the origin domain) to call APIs in another domain. - - :param list(str) allowed_origins: - A list of origin domains that will be allowed via CORS, or "*" to allow - all domains. The list of must contain at least one entry. Limited to 64 - origin domains. Each allowed origin can have up to 256 characters. - :param list(str) allowed_methods: - A list of HTTP methods that are allowed to be executed by the origin. - The list of must contain at least one entry. For Azure Storage, - permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT. - :keyword list(str) allowed_headers: - Defaults to an empty list. A list of headers allowed to be part of - the cross-origin request. Limited to 64 defined headers and 2 prefixed - headers. Each header can be up to 256 characters. - :keyword list(str) exposed_headers: - Defaults to an empty list. A list of response headers to expose to CORS - clients. Limited to 64 defined headers and two prefixed headers. Each - header can be up to 256 characters. - :keyword int max_age_in_seconds: - The number of seconds that the client/browser should cache a - preflight response. 
- """ - - allowed_origins: str - """The comma-delimited string representation of the list of origin domains that will be allowed via - CORS, or "*" to allow all domains.""" - allowed_methods: str - """The comma-delimited string representation of the list HTTP methods that are allowed to be executed - by the origin.""" - exposed_headers: str - """The comma-delimited string representation of the list of response headers to expose to CORS clients.""" - allowed_headers: str - """The comma-delimited string representation of the list of headers allowed to be part of the cross-origin - request.""" - max_age_in_seconds: int - """The number of seconds that the client/browser should cache a pre-flight response.""" - - def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwargs: Any) -> None: - self.allowed_origins = ','.join(allowed_origins) - self.allowed_methods = ','.join(allowed_methods) - self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) - self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) - self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) - - @staticmethod - def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GeneratedCorsRule]]: - if rules is None: - return rules - - generated_cors_list = [] - for cors_rule in rules: - generated_cors = GeneratedCorsRule( - allowed_origins=cors_rule.allowed_origins, - allowed_methods=cors_rule.allowed_methods, - allowed_headers=cors_rule.allowed_headers, - exposed_headers=cors_rule.exposed_headers, - max_age_in_seconds=cors_rule.max_age_in_seconds - ) - generated_cors_list.append(generated_cors) - - return generated_cors_list - - @classmethod - def _from_generated(cls, generated): - return cls( - [generated.allowed_origins], - [generated.allowed_methods], - allowed_headers=[generated.allowed_headers], - exposed_headers=[generated.exposed_headers], - max_age_in_seconds=generated.max_age_in_seconds, - ) - - -class ContainerProperties(DictMixin): - """Blob container's properties class. - - Returned ``ContainerProperties`` instances expose these values through a - dictionary interface, for example: ``container_props["last_modified"]``. 
-class ContainerProperties(DictMixin):
-    """Blob container's properties class.
-
-    Returned ``ContainerProperties`` instances expose these values through a
-    dictionary interface, for example: ``container_props["last_modified"]``.
-    Additionally, the container name is available as ``container_props["name"]``."""
-
-    name: str
-    """Name of the container."""
-    last_modified: "datetime"
-    """A datetime object representing the last time the container was modified."""
-    etag: str
-    """The ETag contains a value that you can use to perform operations conditionally."""
-    lease: "LeaseProperties"
-    """Stores all the lease information for the container."""
-    public_access: Optional[str]
-    """Specifies whether data in the container may be accessed publicly and the level of access."""
-    has_immutability_policy: bool
-    """Represents whether the container has an immutability policy."""
-    has_legal_hold: bool
-    """Represents whether the container has a legal hold."""
-    immutable_storage_with_versioning_enabled: bool
-    """Represents whether immutable storage with versioning is enabled on the container."""
-    metadata: Dict[str, Any]
-    """A dict with name-value pairs to associate with the container as metadata."""
-    encryption_scope: Optional["ContainerEncryptionScope"]
-    """The default encryption scope configuration for the container."""
-    deleted: Optional[bool]
-    """Whether this container was deleted."""
-    version: Optional[str]
-    """The version of a deleted container."""
-
-    def __init__(self, **kwargs: Any) -> None:
-        self.name = None  # type: ignore [assignment]
-        self.last_modified = kwargs.get('Last-Modified')  # type: ignore [assignment]
-        self.etag = kwargs.get('ETag')  # type: ignore [assignment]
-        self.lease = LeaseProperties(**kwargs)
-        self.public_access = kwargs.get('x-ms-blob-public-access')
-        self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy')  # type: ignore [assignment]
-        self.deleted = None
-        self.version = None
-        self.has_legal_hold = kwargs.get('x-ms-has-legal-hold')  # type: ignore [assignment]
-        self.metadata = kwargs.get('metadata')  # type: ignore [assignment]
-        self.encryption_scope = None
-        self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled')  # type: ignore [assignment]  # pylint: disable=name-too-long
-        default_encryption_scope = kwargs.get('x-ms-default-encryption-scope')
-        if default_encryption_scope:
-            self.encryption_scope = ContainerEncryptionScope(
-                default_encryption_scope=default_encryption_scope,
-                prevent_encryption_scope_override=kwargs.get('x-ms-deny-encryption-scope-override', False)
-            )
-
-    @classmethod
-    def _from_generated(cls, generated):  # pylint: disable=name-too-long
-        props = cls()
-        props.name = generated.name
-        props.last_modified = generated.properties.last_modified
-        props.etag = generated.properties.etag
-        props.lease = LeaseProperties._from_generated(generated)  # pylint: disable=protected-access
-        props.public_access = generated.properties.public_access
-        props.has_immutability_policy = generated.properties.has_immutability_policy
-        props.immutable_storage_with_versioning_enabled = generated.properties.is_immutable_storage_with_versioning_enabled  # pylint: disable=line-too-long, name-too-long
-        props.deleted = generated.deleted
-        props.version = generated.version
-        props.has_legal_hold = generated.properties.has_legal_hold
-        props.metadata = generated.metadata
-        props.encryption_scope = ContainerEncryptionScope._from_generated(generated)  # pylint: disable=protected-access
-        return props
-
-
-class ContainerPropertiesPaged(PageIterator):
-    """An Iterable of Container properties.
-
-    :param Callable command: Function to retrieve the next page of items.
-    :param Optional[str] prefix: Filters the results to return only containers whose names
-        begin with the specified prefix.
-    :param Optional[int] results_per_page: The maximum number of container names to retrieve per call.
-    :param Optional[str] continuation_token: An opaque continuation token.
-    """
-
-    service_endpoint: Optional[str]
-    """The service URL."""
-    prefix: Optional[str]
-    """A container name prefix being used to filter the list."""
-    marker: Optional[str]
-    """The continuation token of the current page of results."""
-    results_per_page: Optional[int]
-    """The maximum number of results retrieved per API call."""
-    continuation_token: Optional[str]
-    """The continuation token to retrieve the next page of results."""
-    location_mode: Optional[str]
-    """The location mode being used to list results."""
-    current_page: List["ContainerProperties"]
-    """The current page of listed results."""
-
-    def __init__(
-        self, command: Callable,
-        prefix: Optional[str] = None,
-        results_per_page: Optional[int] = None,
-        continuation_token: Optional[str] = None
-    ) -> None:
-        super(ContainerPropertiesPaged, self).__init__(
-            get_next=self._get_next_cb,
-            extract_data=self._extract_data_cb,
-            continuation_token=continuation_token or ""
-        )
-        self._command = command
-        self.service_endpoint = None
-        self.prefix = prefix
-        self.marker = None
-        self.results_per_page = results_per_page
-        self.location_mode = None
-        self.current_page = []
-
-    def _get_next_cb(self, continuation_token):
-        try:
-            return self._command(
-                marker=continuation_token or None,
-                maxresults=self.results_per_page,
-                cls=return_context_and_deserialized,
-                use_location=self.location_mode)
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    def _extract_data_cb(self, get_next_return):
-        self.location_mode, self._response = get_next_return
-        self.service_endpoint = self._response.service_endpoint
-        self.prefix = self._response.prefix
-        self.marker = self._response.marker
-        self.results_per_page = self._response.max_results
-        self.current_page = [self._build_item(item) for item in self._response.container_items]
-
-        return self._response.next_marker or None, self.current_page
-
-    @staticmethod
-    def _build_item(item):
-        return ContainerProperties._from_generated(item)  # pylint: disable=protected-access
-
-
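# Illustrative sketch (not from the patch): ContainerPropertiesPaged is the pager type
# behind BlobServiceClient.list_containers(). The prefix and page size map onto the
# parameters documented above; the connection string is a placeholder.
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient.from_connection_string("<connection-string>")  # placeholder
for page in service.list_containers(name_starts_with="logs-", results_per_page=50).by_page():
    for container in page:                     # each item is a ContainerProperties
        print(container.name, container.last_modified)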
- """ - - expiry_time: Optional["datetime"] = None - """Specifies the date time when the blobs immutability policy is set to expire.""" - policy_mode: Optional[str] = None - """Specifies the immutability policy mode to set on the blob.""" - - def __init__(self, **kwargs: Any) -> None: - self.expiry_time = kwargs.pop('expiry_time', None) - self.policy_mode = kwargs.pop('policy_mode', None) - - @classmethod - def _from_generated(cls, generated): - immutability_policy = cls() - immutability_policy.expiry_time = generated.properties.immutability_policy_expires_on - immutability_policy.policy_mode = generated.properties.immutability_policy_mode - return immutability_policy - - -class FilteredBlob(DictMixin): - """Blob info from a Filter Blobs API call.""" - - name: str - """Blob name""" - container_name: Optional[str] - """Container name.""" - tags: Optional[Dict[str, str]] - """Key value pairs of blob tags.""" - - def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('name', None) - self.container_name = kwargs.get('container_name', None) - self.tags = kwargs.get('tags', None) - - -class LeaseProperties(DictMixin): - """Blob Lease Properties.""" - - status: str - """The lease status of the blob. Possible values: locked|unlocked""" - state: str - """Lease state of the blob. Possible values: available|leased|expired|breaking|broken""" - duration: Optional[str] - """When a blob is leased, specifies whether the lease is of infinite or fixed duration.""" - - def __init__(self, **kwargs: Any) -> None: - self.status = get_enum_value(kwargs.get('x-ms-lease-status')) - self.state = get_enum_value(kwargs.get('x-ms-lease-state')) - self.duration = get_enum_value(kwargs.get('x-ms-lease-duration')) - - @classmethod - def _from_generated(cls, generated): - lease = cls() - lease.status = get_enum_value(generated.properties.lease_status) - lease.state = get_enum_value(generated.properties.lease_state) - lease.duration = get_enum_value(generated.properties.lease_duration) - return lease - - -class ContentSettings(DictMixin): - """The content settings of a blob. - - :param Optional[str] content_type: - The content type specified for the blob. If no content type was - specified, the default content type is application/octet-stream. - :param Optional[str] content_encoding: - If the content_encoding has previously been set - for the blob, that value is stored. - :param Optional[str] content_language: - If the content_language has previously been set - for the blob, that value is stored. - :param Optional[str] content_disposition: - content_disposition conveys additional information about how to - process the response payload, and also can be used to attach - additional metadata. If content_disposition has previously been set - for the blob, that value is stored. - :param Optional[str] cache_control: - If the cache_control has previously been set for - the blob, that value is stored. - :param Optional[bytearray] content_md5: - If the content_md5 has been set for the blob, this response - header is stored so that the client can check for message content - integrity. 
- """ - - content_type: Optional[str] = None - """The content type specified for the blob.""" - content_encoding: Optional[str] = None - """The content encoding specified for the blob.""" - content_language: Optional[str] = None - """The content language specified for the blob.""" - content_disposition: Optional[str] = None - """The content disposition specified for the blob.""" - cache_control: Optional[str] = None - """The cache control specified for the blob.""" - content_md5: Optional[bytearray] = None - """The content md5 specified for the blob.""" - - def __init__( - self, content_type: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_disposition: Optional[str] = None, - cache_control: Optional[str] = None, - content_md5: Optional[bytearray] = None, - **kwargs: Any - ) -> None: - - self.content_type = content_type or kwargs.get('Content-Type') - self.content_encoding = content_encoding or kwargs.get('Content-Encoding') - self.content_language = content_language or kwargs.get('Content-Language') - self.content_md5 = content_md5 or kwargs.get('Content-MD5') - self.content_disposition = content_disposition or kwargs.get('Content-Disposition') - self.cache_control = cache_control or kwargs.get('Cache-Control') - - @classmethod - def _from_generated(cls, generated): - settings = cls() - settings.content_type = generated.properties.content_type or None - settings.content_encoding = generated.properties.content_encoding or None - settings.content_language = generated.properties.content_language or None - settings.content_md5 = generated.properties.content_md5 or None - settings.content_disposition = generated.properties.content_disposition or None - settings.cache_control = generated.properties.cache_control or None - return settings - - -class CopyProperties(DictMixin): - """Blob Copy Properties. - - These properties will be `None` if this blob has never been the destination - in a Copy Blob operation, or if this blob has been modified after a concluded - Copy Blob operation, for example, using Set Blob Properties, Upload Blob, or Commit Block List. - """ - - id: Optional[str] - """String identifier for the last attempted Copy Blob operation where this blob - was the destination blob.""" - source: Optional[str] - """URL up to 2 KB in length that specifies the source blob used in the last attempted - Copy Blob operation where this blob was the destination blob.""" - status: Optional[str] - """State of the copy operation identified by Copy ID, with these values: - success: Copy completed successfully. - pending: Copy is in progress. Check copy_status_description if intermittent, non-fatal errors impede copy progress - but don't cause failure. - aborted: Copy was ended by Abort Copy Blob. - failed: Copy failed. See copy_status_description for failure details.""" - progress: Optional[str] - """Contains the number of bytes copied and the total bytes in the source in the last - attempted Copy Blob operation where this blob was the destination blob. Can show - between 0 and Content-Length bytes copied.""" - completion_time: Optional["datetime"] - """Conclusion time of the last attempted Copy Blob operation where this blob was the - destination blob. This value can specify the time of a completed, aborted, or - failed copy attempt.""" - status_description: Optional[str] - """Only appears when x-ms-copy-status is failed or pending. 
Describes cause of fatal
-    or non-fatal copy operation failure."""
-    incremental_copy: Optional[bool]
-    """Copies the snapshot of the source page blob to a destination page blob.
-    The snapshot is copied such that only the differential changes since
-    the previously copied snapshot are transferred to the destination."""
-    destination_snapshot: Optional["datetime"]
-    """Included if the blob is an incremental copy blob or incremental copy snapshot,
-    if x-ms-copy-status is success. Snapshot time of the last successful
-    incremental copy snapshot for this blob."""
-
-    def __init__(self, **kwargs: Any) -> None:
-        self.id = kwargs.get('x-ms-copy-id')
-        self.source = kwargs.get('x-ms-copy-source')
-        self.status = get_enum_value(kwargs.get('x-ms-copy-status'))
-        self.progress = kwargs.get('x-ms-copy-progress')
-        self.completion_time = kwargs.get('x-ms-copy-completion-time')
-        self.status_description = kwargs.get('x-ms-copy-status-description')
-        self.incremental_copy = kwargs.get('x-ms-incremental-copy')
-        self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot')
-
-    @classmethod
-    def _from_generated(cls, generated):
-        copy = cls()
-        copy.id = generated.properties.copy_id or None
-        copy.status = get_enum_value(generated.properties.copy_status) or None
-        copy.source = generated.properties.copy_source or None
-        copy.progress = generated.properties.copy_progress or None
-        copy.completion_time = generated.properties.copy_completion_time or None
-        copy.status_description = generated.properties.copy_status_description or None
-        copy.incremental_copy = generated.properties.incremental_copy or None
-        copy.destination_snapshot = generated.properties.destination_snapshot or None
-        return copy
-
-
-class BlobBlock(DictMixin):
-    """BlockBlob Block class.
-
-    :param str block_id:
-        Block id.
-    :param BlockState state:
-        Block state. Possible values: BlockState.COMMITTED | BlockState.UNCOMMITTED
-    """
-
-    block_id: str
-    """Block id."""
-    state: BlockState
-    """Block state."""
-    size: int
-    """Block size."""
-
-    def __init__(self, block_id: str, state: BlockState = BlockState.LATEST) -> None:
-        self.id = block_id
-        self.state = state
-        self.size = None  # type: ignore [assignment]
-
-    @classmethod
-    def _from_generated(cls, generated):
-        try:
-            decoded_bytes = decode_base64_to_bytes(generated.name)
-            block_id = decoded_bytes.decode('utf-8')
-        # When large blocks are uploaded through upload_blob, the block id is not base64-encoded,
-        # while the service expects a base64-encoded id. If the returned id cannot be base64-decoded
-        # here, it was never encoded when the block was staged, so use the returned block_id directly.
-        except UnicodeDecodeError:
-            block_id = generated.name
-        block = cls(block_id)
-        block.size = generated.size
-        return block
-
-
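# Illustrative sketch (not from the patch): BlobBlock describes blocks staged against a
# block blob. Base64-encoding the ids, as below, matches the expectation noted in
# _from_generated above. Client names are placeholders.
import base64
from azure.storage.blob import BlobClient, BlobBlock

blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "big.bin")
block_ids = []
for i, chunk in enumerate([b"first-chunk", b"second-chunk"]):
    block_id = base64.b64encode(f"block-{i:06d}".encode()).decode()
    blob.stage_block(block_id=block_id, data=chunk)   # uploads an uncommitted block
    block_ids.append(block_id)
blob.commit_block_list([BlobBlock(block_id=bid) for bid in block_ids])  # commit in order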
- """ - - start: Optional[int] = None - """Start of page range in bytes.""" - end: Optional[int] = None - """End of page range in bytes.""" - cleared: bool - """Whether the range has been cleared.""" - - def __init__(self, start: Optional[int] = None, end: Optional[int] = None, *, cleared: bool = False) -> None: - self.start = start - self.end = end - self.cleared = cleared - - -class PageRangePaged(PageIterator): - def __init__(self, command, results_per_page=None, continuation_token=None): - super(PageRangePaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.results_per_page = results_per_page - self.location_mode = None - self.current_page = [] - - def _get_next_cb(self, continuation_token): - try: - return self._command( - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.current_page = self._build_page(self._response) - - return self._response.next_marker or None, self.current_page - - @staticmethod - def _build_page(response): - if not response: - raise StopIteration - - return parse_page_list(response) - - -class ContainerSasPermissions(object): - """ContainerSasPermissions class to be used with the - :func:`~azure.storage.blob.generate_container_sas` function and - for the AccessPolicies used with - :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. - - :param bool read: - Read the content, properties, metadata or block list of any blob in the - container. Use any blob in the container as the source of a copy operation. - :param bool write: - For any blob in the container, create or write content, properties, - metadata, or block list. Snapshot or lease the blob. Resize the blob - (page blob only). Use the blob as the destination of a copy operation - within the same account. Note: You cannot grant permissions to read or - write container properties or metadata, nor to lease a container, with - a container SAS. Use an account SAS instead. - :param bool delete: - Delete any blob in the container. Note: You cannot grant permissions to - delete a container with a container SAS. Use an account SAS instead. - :param bool delete_previous_version: - Delete the previous blob version for the versioning enabled storage account. - :param bool list: - List blobs in the container. - :param bool tag: - Set or get tags on the blobs in the container. - :keyword bool add: - Add a block to an append blob. - :keyword bool create: - Write a new blob, snapshot a blob, or copy a blob to a new blob. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - :keyword bool filter_by_tags: - To enable finding blobs by tags. - :keyword bool move: - Move a blob or a directory and its contents to a new location. - :keyword bool execute: - Get the system properties and, if the hierarchical namespace is enabled for the storage account, - get the POSIX ACL of a blob. - :keyword bool set_immutability_policy: - To enable operations related to set/delete immutability policy. - To get immutability policy, you just need read permission. 
- """ - - read: bool = False - """The read permission for container SAS.""" - write: bool = False - """The write permission for container SAS.""" - delete: bool = False - """The delete permission for container SAS.""" - delete_previous_version: bool = False - """Permission to delete previous blob version for versioning enabled - storage accounts.""" - list: bool = False - """The list permission for container SAS.""" - tag: bool = False - """Set or get tags on the blobs in the container.""" - add: Optional[bool] - """Add a block to an append blob.""" - create: Optional[bool] - """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" - permanent_delete: Optional[bool] - """To enable permanent delete on the blob is permitted.""" - move: Optional[bool] - """Move a blob or a directory and its contents to a new location.""" - execute: Optional[bool] - """Get the system properties and, if the hierarchical namespace is enabled for the storage account, - get the POSIX ACL of a blob.""" - set_immutability_policy: Optional[bool] - """To get immutability policy, you just need read permission.""" - - def __init__( - self, read: bool = False, - write: bool = False, - delete: bool = False, - list: bool = False, - delete_previous_version: bool = False, - tag: bool = False, - **kwargs: Any - ) -> None: # pylint: disable=redefined-builtin - self.read = read - self.add = kwargs.pop('add', False) - self.create = kwargs.pop('create', False) - self.write = write - self.delete = delete - self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) - self.list = list - self.tag = tag - self.filter_by_tags = kwargs.pop('filter_by_tags', False) - self.move = kwargs.pop('move', False) - self.execute = kwargs.pop('execute', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) - self._str = (('r' if self.read else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + - ('l' if self.list else '') + - ('t' if self.tag else '') + - ('f' if self.filter_by_tags else '') + - ('m' if self.move else '') + - ('e' if self.execute else '') + - ('i' if self.set_immutability_policy else '')) - - def __str__(self): - return self._str - - @classmethod - def from_string(cls, permission: str) -> "ContainerSasPermissions": - """Create a ContainerSasPermissions from a string. - - To specify read, write, delete, or list permissions you need only to - include the first letter of the word in the string. E.g. For read and - write permissions, you would provide a string "rw". - - :param str permission: The string which dictates the read, write, delete, - and list permissions. 
-    @classmethod
-    def from_string(cls, permission: str) -> "ContainerSasPermissions":
-        """Create a ContainerSasPermissions from a string.
-
-        To specify read, write, delete, or list permissions you need only to
-        include the first letter of the word in the string. E.g. for read and
-        write permissions, you would provide a string "rw".
-
-        :param str permission: The string which dictates the read, write, delete,
-            and list permissions.
-        :return: A ContainerSasPermissions object
-        :rtype: ~azure.storage.blob.ContainerSasPermissions
-        """
-        p_read = 'r' in permission
-        p_add = 'a' in permission
-        p_create = 'c' in permission
-        p_write = 'w' in permission
-        p_delete = 'd' in permission
-        p_delete_previous_version = 'x' in permission
-        p_permanent_delete = 'y' in permission
-        p_list = 'l' in permission
-        p_tag = 't' in permission
-        p_filter_by_tags = 'f' in permission
-        p_move = 'm' in permission
-        p_execute = 'e' in permission
-        p_set_immutability_policy = 'i' in permission
-        parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list,
-                     delete_previous_version=p_delete_previous_version, tag=p_tag, add=p_add,
-                     create=p_create, permanent_delete=p_permanent_delete, filter_by_tags=p_filter_by_tags,
-                     move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy)
-
-        return parsed
-
-
-class AccessPolicy(GenAccessPolicy):
-    """Access Policy class used by the set and get access policy methods in each service.
-
-    A stored access policy can specify the start time, expiry time, and
-    permissions for the Shared Access Signatures with which it's associated.
-    Depending on how you want to control access to your resource, you can
-    specify all of these parameters within the stored access policy, and omit
-    them from the URL for the Shared Access Signature. Doing so permits you to
-    modify the associated signature's behavior at any time, as well as to revoke
-    it. Or you can specify one or more of the access policy parameters within
-    the stored access policy, and the others on the URL. Finally, you can
-    specify all of the parameters on the URL. In this case, you can use the
-    stored access policy to revoke the signature, but not to modify its behavior.
-
-    Together the Shared Access Signature and the stored access policy must
-    include all fields required to authenticate the signature. If any required
-    fields are missing, the request will fail. Likewise, if a field is specified
-    both in the Shared Access Signature URL and in the stored access policy, the
-    request will fail with status code 400 (Bad Request).
-
-    :param permission:
-        The permissions associated with the shared access signature. The
-        user is restricted to operations allowed by the permissions.
-        Required unless an id is given referencing a stored access policy
-        which contains this field. This field must be omitted if it has been
-        specified in an associated stored access policy.
-    :type permission: Optional[Union[ContainerSasPermissions, str]]
-    :param expiry:
-        The time at which the shared access signature becomes invalid.
-        Required unless an id is given referencing a stored access policy
-        which contains this field. This field must be omitted if it has
-        been specified in an associated stored access policy. Azure will always
-        convert values to UTC. If a date is passed in without timezone info, it
-        is assumed to be UTC.
-    :type expiry: Optional[Union[str, datetime]]
-    :param start:
-        The time at which the shared access signature becomes valid. If
-        omitted, start time for this call is assumed to be the time when the
-        storage service receives the request. Azure will always convert values
-        to UTC. If a date is passed in without timezone info, it is assumed to
-        be UTC.
-    :type start: Optional[Union[str, datetime]]
-    """
-
-    permission: Optional[Union[ContainerSasPermissions, str]]  # type: ignore [assignment]
-    """The permissions associated with the shared access signature. The user is restricted to
-    operations allowed by the permissions."""
-    expiry: Optional[Union["datetime", str]]  # type: ignore [assignment]
-    """The time at which the shared access signature becomes invalid."""
-    start: Optional[Union["datetime", str]]  # type: ignore [assignment]
-    """The time at which the shared access signature becomes valid."""
-
-    def __init__(
-        self, permission: Optional[Union["ContainerSasPermissions", str]] = None,
-        expiry: Optional[Union[str, "datetime"]] = None,
-        start: Optional[Union[str, "datetime"]] = None
-    ) -> None:
-        self.start = start
-        self.expiry = expiry
-        self.permission = permission
-
-
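# Illustrative sketch (not from the patch): an AccessPolicy is stored on the container
# and referenced by id from a SAS, so its permissions/expiry can later be changed or
# revoked server-side. Connection string, container, and policy id are placeholders.
from datetime import datetime, timedelta
from azure.storage.blob import ContainerClient, AccessPolicy, ContainerSasPermissions

container = ContainerClient.from_connection_string("<connection-string>", "mycontainer")
policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True, list=True),
    expiry=datetime.utcnow() + timedelta(days=1),
    start=datetime.utcnow(),
)
container.set_container_access_policy(signed_identifiers={"read-policy-1": policy})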
- """ - - read: bool = False - """The read permission for Blob SAS.""" - add: Optional[bool] - """The add permission for Blob SAS.""" - create: Optional[bool] - """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" - write: bool = False - """The write permission for Blob SAS.""" - delete: bool = False - """The delete permission for Blob SAS.""" - delete_previous_version: bool = False - """Permission to delete previous blob version for versioning enabled - storage accounts.""" - tag: bool = False - """Set or get tags on the blobs in the Blob.""" - permanent_delete: Optional[bool] - """To enable permanent delete on the blob is permitted.""" - move: Optional[bool] - """Move a blob or a directory and its contents to a new location.""" - execute: Optional[bool] - """Get the system properties and, if the hierarchical namespace is enabled for the storage account, - get the POSIX ACL of a blob.""" - set_immutability_policy: Optional[bool] - """To get immutability policy, you just need read permission.""" - - def __init__( - self, read: bool = False, - add: bool = False, - create: bool = False, - write: bool = False, - delete: bool = False, - delete_previous_version: bool = False, - tag: bool = False, - **kwargs: Any - ) -> None: - self.read = read - self.add = add - self.create = create - self.write = write - self.delete = delete - self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) - self.tag = tag - self.move = kwargs.pop('move', False) - self.execute = kwargs.pop('execute', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) - self._str = (('r' if self.read else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + - ('t' if self.tag else '') + - ('m' if self.move else '') + - ('e' if self.execute else '') + - ('i' if self.set_immutability_policy else '')) - - def __str__(self): - return self._str - - @classmethod - def from_string(cls, permission: str) -> "BlobSasPermissions": - """Create a BlobSasPermissions from a string. - - To specify read, add, create, write, or delete permissions you need only to - include the first letter of the word in the string. E.g. For read and - write permissions, you would provide a string "rw". - - :param str permission: The string which dictates the read, add, create, - write, or delete permissions. - :return: A BlobSasPermissions object - :rtype: ~azure.storage.blob.BlobSasPermissions - """ - p_read = 'r' in permission - p_add = 'a' in permission - p_create = 'c' in permission - p_write = 'w' in permission - p_delete = 'd' in permission - p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission - p_tag = 't' in permission - p_move = 'm' in permission - p_execute = 'e' in permission - p_set_immutability_policy = 'i' in permission - - parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, - delete_previous_version=p_delete_previous_version, tag=p_tag, permanent_delete=p_permanent_delete, - move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) - - return parsed - - -class CustomerProvidedEncryptionKey(object): - """ - All data in Azure Storage is encrypted at-rest using an account-level encryption key. 
- In versions 2018-06-17 and newer, you can manage the key used to encrypt blob contents - and application metadata per-blob by providing an AES-256 encryption key in requests to the storage service. - - When you use a customer-provided key, Azure Storage does not manage or persist your key. - When writing data to a blob, the provided key is used to encrypt your data before writing it to disk. - A SHA-256 hash of the encryption key is written alongside the blob contents, - and is used to verify that all subsequent operations against the blob use the same encryption key. - This hash cannot be used to retrieve the encryption key or decrypt the contents of the blob. - When reading a blob, the provided key is used to decrypt your data after reading it from disk. - In both cases, the provided encryption key is securely discarded - as soon as the encryption or decryption process completes. - - :param str key_value: - Base64-encoded AES-256 encryption key value. - :param str key_hash: - Base64-encoded SHA256 of the encryption key. - """ - - key_value: str - """Base64-encoded AES-256 encryption key value.""" - key_hash: str - """Base64-encoded SHA256 of the encryption key.""" - algorithm: str - """Specifies the algorithm to use when encrypting data using the given key. Must be AES256.""" - - def __init__(self, key_value: str, key_hash: str) -> None: - self.key_value = key_value - self.key_hash = key_hash - self.algorithm = 'AES256' - - -class ContainerEncryptionScope(object): - """The default encryption scope configuration for a container. - - This scope is used implicitly for all future writes within the container, - but can be overridden per blob operation. - - .. versionadded:: 12.2.0 - - :param str default_encryption_scope: - Specifies the default encryption scope to set on the container and use for - all future writes. - :param bool prevent_encryption_scope_override: - If true, prevents any request from specifying a different encryption scope than the scope - set on the container. Default value is false. - """ - - default_encryption_scope: str - """Specifies the default encryption scope to set on the container and use for - all future writes.""" - prevent_encryption_scope_override: bool - """If true, prevents any request from specifying a different encryption scope than the scope - set on the container.""" - - def __init__(self, default_encryption_scope: str, **kwargs: Any) -> None: - self.default_encryption_scope = default_encryption_scope - self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False) - - @classmethod - def _from_generated(cls, generated): - if generated.properties.default_encryption_scope: - scope = cls( - generated.properties.default_encryption_scope, - prevent_encryption_scope_override=generated.properties.prevent_encryption_scope_override or False - ) - return scope - return None - - -class DelimitedJsonDialect(DictMixin): - """Defines the input or output JSON serialization for a blob data query. - - :keyword str delimiter: The line separator character, default value is '\\\\n'. - """ - - def __init__(self, **kwargs: Any) -> None: - self.delimiter = kwargs.pop('delimiter', '\n') - - -class DelimitedTextDialect(DictMixin): - """Defines the input or output delimited (CSV) serialization for a blob query request. - - :keyword str delimiter: - Column separator, defaults to ','. - :keyword str quotechar: - Field quote, defaults to '"'. - :keyword str lineterminator: - Record separator, defaults to '\\\\n'. 
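# Illustrative sketch (not from the patch): building a CustomerProvidedEncryptionKey from
# 32 random bytes, following the base64/SHA-256 contract described above. Client names
# are placeholders; the same `cpk` value must be supplied again to read the blob back.
import base64
import hashlib
import os
from azure.storage.blob import BlobClient, CustomerProvidedEncryptionKey

raw_key = os.urandom(32)  # AES-256 key material
cpk = CustomerProvidedEncryptionKey(
    key_value=base64.b64encode(raw_key).decode(),
    key_hash=base64.b64encode(hashlib.sha256(raw_key).digest()).decode(),
)
blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "secret.bin")
blob.upload_blob(b"sensitive-bytes", overwrite=True, cpk=cpk)
data = blob.download_blob(cpk=cpk).readall()   # decrypts with the same key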
- :keyword str escapechar: - Escape char, defaults to empty. - :keyword bool has_header: - Whether the blob data includes headers in the first line. The default value is False, meaning that the - data will be returned inclusive of the first line. If set to True, the data will be returned exclusive - of the first line. - """ - - def __init__(self, **kwargs: Any) -> None: - self.delimiter = kwargs.pop('delimiter', ',') - self.quotechar = kwargs.pop('quotechar', '"') - self.lineterminator = kwargs.pop('lineterminator', '\n') - self.escapechar = kwargs.pop('escapechar', "") - self.has_header = kwargs.pop('has_header', False) - - -class ArrowDialect(ArrowField): - """field of an arrow schema. - - All required parameters must be populated in order to send to Azure. - - :param ~azure.storage.blob.ArrowType type: Arrow field type. - :keyword str name: The name of the field. - :keyword int precision: The precision of the field. - :keyword int scale: The scale of the field. - """ - - def __init__(self, type, **kwargs: Any) -> None: # pylint: disable=redefined-builtin - super(ArrowDialect, self).__init__(type=type, **kwargs) - - -class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - - INT64 = "int64" - BOOL = "bool" - TIMESTAMP_MS = "timestamp[ms]" - STRING = "string" - DOUBLE = "double" - DECIMAL = 'decimal' - - -class ObjectReplicationRule(DictMixin): - """Policy id and rule ids applied to a blob.""" - - rule_id: str - """Rule id.""" - status: str - """The status of the rule. It could be "Complete" or "Failed" """ - - def __init__(self, **kwargs: Any) -> None: - self.rule_id = kwargs.pop('rule_id', None) # type: ignore [assignment] - self.status = kwargs.pop('status', None) # type: ignore [assignment] - - -class ObjectReplicationPolicy(DictMixin): - """Policy id and rule ids applied to a blob.""" - - policy_id: str - """Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair.""" - rules: List[ObjectReplicationRule] - """Within each policy there may be multiple replication rules. - e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3""" - - def __init__(self, **kwargs: Any) -> None: - self.policy_id = kwargs.pop('policy_id', None) # type: ignore [assignment] - self.rules = kwargs.pop('rules', []) - - -class BlobProperties(DictMixin): - """Blob Properties.""" - - name: str - """The name of the blob.""" - container: str - """The container in which the blob resides.""" - snapshot: Optional[str] - """Datetime value that uniquely identifies the blob snapshot.""" - blob_type: "BlobType" - """String indicating this blob's type.""" - metadata: Dict[str, str] - """Name-value pairs associated with the blob as metadata.""" - last_modified: "datetime" - """A datetime object representing the last time the blob was modified.""" - etag: str - """The ETag contains a value that you can use to perform operations - conditionally.""" - size: int - """The size of the content returned. If the entire blob was requested, - the length of blob in bytes. 
If a subset of the blob was requested, the
-    length of the returned subset."""
-    content_range: Optional[str]
-    """Indicates the range of bytes returned in the event that the client
-    requested a subset of the blob."""
-    append_blob_committed_block_count: Optional[int]
-    """(For Append Blobs) Number of committed blocks in the blob."""
-    is_append_blob_sealed: Optional[bool]
-    """Indicates whether the append blob is sealed."""
-    page_blob_sequence_number: Optional[int]
-    """(For Page Blobs) Sequence number for page blob used for coordinating
-    concurrent writes."""
-    server_encrypted: bool
-    """Set to true if the blob is encrypted on the server."""
-    copy: "CopyProperties"
-    """Stores all the copy properties for the blob."""
-    content_settings: ContentSettings
-    """Stores all the content settings for the blob."""
-    lease: LeaseProperties
-    """Stores all the lease information for the blob."""
-    blob_tier: Optional[StandardBlobTier]
-    """Indicates the access tier of the blob. The hot tier is optimized
-    for storing data that is accessed frequently. The cool storage tier
-    is optimized for storing data that is infrequently accessed and stored
-    for at least a month. The archive tier is optimized for storing
-    data that is rarely accessed and stored for at least six months
-    with flexible latency requirements."""
-    rehydrate_priority: Optional[str]
-    """Indicates the priority with which to rehydrate an archived blob."""
-    blob_tier_change_time: Optional["datetime"]
-    """Indicates when the access tier was last changed."""
-    blob_tier_inferred: Optional[bool]
-    """Indicates whether the access tier was inferred by the service.
-    If false, it indicates that the tier was set explicitly."""
-    deleted: Optional[bool]
-    """Whether this blob was deleted."""
-    deleted_time: Optional["datetime"]
-    """A datetime object representing the time at which the blob was deleted."""
-    remaining_retention_days: Optional[int]
-    """The number of days that the blob will be retained before being permanently deleted by the service."""
-    creation_time: "datetime"
-    """Indicates when the blob was created, in UTC."""
-    archive_status: Optional[str]
-    """Archive status of the blob."""
-    encryption_key_sha256: Optional[str]
-    """The SHA-256 hash of the provided encryption key."""
-    encryption_scope: Optional[str]
-    """A predefined encryption scope used to encrypt the data on the service. An encryption
-    scope can be created using the Management API and referenced here by name. If a default
-    encryption scope has been defined at the container, this value will override it if the
-    container-level scope is configured to allow overrides. Otherwise an error will be raised."""
-    request_server_encrypted: Optional[bool]
-    """Whether this blob is encrypted."""
-    object_replication_source_properties: Optional[List[ObjectReplicationPolicy]]
-    """Only present for blobs that have policy ids and rule ids applied to them."""
-    object_replication_destination_policy: Optional[str]
-    """Represents the Object Replication Policy Id that created this blob."""
-    last_accessed_on: Optional["datetime"]
-    """Indicates when the last Read/Write operation was performed on a Blob."""
-    tag_count: Optional[int]
-    """Tags count on this blob."""
-    tags: Optional[Dict[str, str]]
-    """Key-value pairs of tags on this blob."""
-    has_versions_only: Optional[bool]
-    """A true value indicates the root blob is deleted."""
-    immutability_policy: ImmutabilityPolicy
-    """Specifies the immutability policy of a blob, blob snapshot or blob version."""
-    has_legal_hold: Optional[bool]
-    """Specifies whether a legal hold should be set on the blob.
-    Currently this parameter of the upload_blob() API applies to BlockBlob only."""
-
-    def __init__(self, **kwargs: Any) -> None:
-        self.name = kwargs.get('name')  # type: ignore [assignment]
-        self.container = None  # type: ignore [assignment]
-        self.snapshot = kwargs.get('x-ms-snapshot')
-        self.version_id = kwargs.get('x-ms-version-id')
-        self.is_current_version = kwargs.get('x-ms-is-current-version')
-        self.blob_type = BlobType(kwargs['x-ms-blob-type']) if kwargs.get('x-ms-blob-type') else None  # type: ignore [assignment]  # pylint: disable=line-too-long
-        self.metadata = kwargs.get('metadata')  # type: ignore [assignment]
-        self.encrypted_metadata = kwargs.get('encrypted_metadata')
-        self.last_modified = kwargs.get('Last-Modified')  # type: ignore [assignment]
-        self.etag = kwargs.get('ETag')  # type: ignore [assignment]
-        self.size = kwargs.get('Content-Length')  # type: ignore [assignment]
-        self.content_range = kwargs.get('Content-Range')
-        self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count')
-        self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed')
-        self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number')
-        self.server_encrypted = kwargs.get('x-ms-server-encrypted')  # type: ignore [assignment]
-        self.copy = CopyProperties(**kwargs)
-        self.content_settings = ContentSettings(**kwargs)
-        self.lease = LeaseProperties(**kwargs)
-        self.blob_tier = kwargs.get('x-ms-access-tier')
-        self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority')
-        self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time')
-        self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred')
-        self.deleted = False
-        self.deleted_time = None
-        self.remaining_retention_days = None
-        self.creation_time = kwargs.get('x-ms-creation-time')  # type: ignore [assignment]
-        self.archive_status = kwargs.get('x-ms-archive-status')
-        self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256')
-        self.encryption_scope = kwargs.get('x-ms-encryption-scope')
-        self.request_server_encrypted = kwargs.get('x-ms-server-encrypted')
-        self.object_replication_source_properties = kwargs.get('object_replication_source_properties')
-        self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id')
-        self.last_accessed_on = kwargs.get('x-ms-last-access-time')
-        self.tag_count = kwargs.get('x-ms-tag-count')
-        self.tags = None
-        self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'),
-                                                      policy_mode=kwargs.get('x-ms-immutability-policy-mode'))
-        self.has_legal_hold = kwargs.get('x-ms-legal-hold')
-        self.has_versions_only = None
-
-
-class BlobQueryError(object):
-    """An error that occurred during a quick query operation."""
-
-    error: Optional[str]
-    """The name of the error."""
-    is_fatal: bool
-    """If true, this error prevents further query processing. More result data may be returned,
-    but there is no guarantee that all of the original data will be processed.
-    If false, this error does not prevent further query processing."""
-    description: Optional[str]
-    """A description of the error."""
-    position: Optional[int]
-    """The blob offset at which the error occurred."""
-
-    def __init__(
-        self, error: Optional[str] = None,
-        is_fatal: bool = False,
-        description: Optional[str] = None,
-        position: Optional[int] = None
-    ) -> None:
-        self.error = error
-        self.is_fatal = is_fatal
-        self.description = description
-        self.position = position
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_patch.py
similarity index 69%
rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_patch.py
rename to sdk/storage/azure-storage-blob/azure/storage/blob/_patch.py
index 71dde502c70f..f7dd32510333 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_patch.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_patch.py
@@ -2,19 +2,13 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
 # ------------------------------------
-
-
 """Customize generated code here. Follow our quickstart for examples:
 https://aka.ms/azsdk/python/dpcodegen/python/customize
 """
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    # pylint: disable=unused-import,ungrouped-imports
+from typing import List
 
-    from typing import List
-__all__ = []  # type: List[str]  # Add all objects you want publicly available to users at this package level
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
 
 
 def patch_sdk():
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py
deleted file mode 100644
index 95f8a4427bba..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-
-from io import BytesIO
-from typing import Any, Dict, Generator, IO, Iterable, Optional, Type, Union, TYPE_CHECKING
-
-from ._shared.avro.avro_io import DatumReader
-from ._shared.avro.datafile import DataFileReader
-
-if TYPE_CHECKING:
-    from ._models import BlobQueryError
-
-
-class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
-    """A streaming object to read query results."""
-
-    name: str
-    """The name of the blob being queried."""
-    container: str
-    """The name of the container where the blob is."""
-    response_headers: Dict[str, Any]
-    """The response_headers of the quick query request."""
-    record_delimiter: str
-    """The delimiter used to separate lines, or records, within the data. The `records`
The `records` - method will return these lines via a generator.""" - - def __init__( - self, - name: str = None, # type: ignore [assignment] - container: str = None, # type: ignore [assignment] - errors: Any = None, - record_delimiter: str = '\n', - encoding: Optional[str] = None, - headers: Dict[str, Any] = None, # type: ignore [assignment] - response: Any = None, - error_cls: Type["BlobQueryError"] = None, # type: ignore [assignment] - ) -> None: - self.name = name - self.container = container - self.response_headers = headers - self.record_delimiter = record_delimiter - self._size = 0 - self._bytes_processed = 0 - self._errors = errors - self._encoding = encoding - self._parsed_results = DataFileReader(QuickQueryStreamer(response), DatumReader()) - self._first_result = self._process_record(next(self._parsed_results)) - self._error_cls = error_cls - - def __len__(self): - return self._size - - def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]: - self._size = result.get('totalBytes', self._size) - self._bytes_processed = result.get('bytesScanned', self._bytes_processed) - if 'data' in result: - return result.get('data') - if 'fatal' in result: - error = self._error_cls( - error=result['name'], - is_fatal=result['fatal'], - description=result['description'], - position=result['position'] - ) - if self._errors: - self._errors(error) - return None - - def _iter_stream(self) -> Generator[bytes, None, None]: - if self._first_result is not None: - yield self._first_result - for next_result in self._parsed_results: - processed_result = self._process_record(next_result) - if processed_result is not None: - yield processed_result - - def readall(self) -> Union[bytes, str]: - """Return all query results. - - This operation is blocking until all data is downloaded. - If encoding has been configured - this will be used to decode individual - records are they are received. - - :returns: The query results. - :rtype: Union[bytes, str] - """ - stream = BytesIO() - self.readinto(stream) - data = stream.getvalue() - if self._encoding: - return data.decode(self._encoding) - return data - - def readinto(self, stream: IO) -> None: - """Download the query result to a stream. - - :param IO stream: - The stream to download to. This can be an open file-handle, - or any writable stream. - :returns: None - """ - for record in self._iter_stream(): - stream.write(record) - - def records(self) -> Iterable[Union[bytes, str]]: - """Returns a record generator for the query result. - - Records will be returned line by line. - If encoding has been configured - this will be used to decode individual - records are they are received. - - :returns: A record generator for the query result. - :rtype: Iterable[Union[bytes, str]] - """ - delimiter = self.record_delimiter.encode('utf-8') - for record_chunk in self._iter_stream(): - for record in record_chunk.split(delimiter): - if self._encoding: - yield record.decode(self._encoding) - else: - yield record - - -class QuickQueryStreamer(object): - """ - File-like streaming iterator. 
- """ - - def __init__(self, generator): - self.generator = generator - self.iterator = iter(generator) - self._buf = b"" - self._point = 0 - self._download_offset = 0 - self._buf_start = 0 - self.file_length = None - - def __len__(self): - return self.file_length - - def __iter__(self): - return self.iterator - - @staticmethod - def seekable(): - return True - - def __next__(self): - next_part = next(self.iterator) - self._download_offset += len(next_part) - return next_part - - def tell(self): - return self._point - - def seek(self, offset, whence=0): - if whence == 0: - self._point = offset - elif whence == 1: - self._point += offset - else: - raise ValueError("whence must be 0, or 1") - if self._point < 0: # pylint: disable=consider-using-max-builtin - self._point = 0 # XXX is this right? - - def read(self, size): - try: - # keep reading from the generator until the buffer of this stream has enough data to read - while self._point + size > self._download_offset: - self._buf += self.__next__() - except StopIteration: - self.file_length = self._download_offset - - start_point = self._point - - # EOF - self._point = min(self._point + size, self._download_offset) - - relative_start = start_point - self._buf_start - if relative_start < 0: - raise ValueError("Buffer has dumped too much data") - relative_end = relative_start + size - data = self._buf[relative_start: relative_end] - - # dump the extra data in buffer - # buffer start--------------------16bytes----current read position - dumped_size = max(relative_end - 16 - relative_start, 0) - self._buf_start += dumped_size - self._buf = self._buf[dumped_size:] - - return data diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialization.py similarity index 86% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/_serialization.py index 8139854b97bb..01a226bd7f14 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialization.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -24,7 +25,6 @@ # # -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -52,7 +52,6 @@ MutableMapping, Type, List, - Mapping, ) try: @@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. 
+    :rtype: object
     """
     if hasattr(data, "read"):
         # Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
         try:
             return json.loads(data_as_str)
         except ValueError as err:
-            raise DeserializationError("JSON is invalid: {}".format(err), err)
+            raise DeserializationError("JSON is invalid: {}".format(err), err) from err
     elif "xml" in (content_type or []):
         try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
         Use bytes and headers to NOT use any requests/aiohttp or whatever
         specific implementation.
         Headers will be tested for "content-type"
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
         """
         # Try to use content-type from headers if available
         content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
     """Time Zone info for handling UTC"""
 
     def utcoffset(self, dt):
-        """UTC offset for UTC is 0."""
+        """UTC offset for UTC is 0.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The offset
+        :rtype: datetime.timedelta
+        """
         return datetime.timedelta(0)
 
     def tzname(self, dt):
-        """Timestamp representation."""
+        """Timestamp representation.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The timestamp representation
+        :rtype: str
+        """
         return "Z"
 
     def dst(self, dt):
-        """No daylight saving for UTC."""
+        """No daylight saving for UTC.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The daylight saving time
+        :rtype: datetime.timedelta
+        """
         return datetime.timedelta(hours=1)
@@ -235,24 +256,26 @@ def __getinitargs__(self):
 _FLATTEN = re.compile(r"(?<!\\)\.")
 
 
 class Model(object):
     """Mixin for all client request body/response body models to support
     serialization and deserialization.
     """
 
     _subtype_map: Dict[str, Dict[str, Any]] = {}
     _attribute_map: Dict[str, Dict[str, Any]] = {}
     _validation: Dict[str, Dict[str, Any]] = {}
 
     def __init__(self, **kwargs: Any) -> None:
         self.additional_properties: Optional[Dict[str, Any]] = {}
-        for k in kwargs:
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
             if k not in self._attribute_map:
                 _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
             elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
             setattr(self, k, kwargs[k])
 
     def __eq__(self, other: Any) -> bool:
-        """Compare objects by comparing all attributes."""
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
         if isinstance(other, self.__class__):
             return self.__dict__ == other.__dict__
         return False
 
     def __ne__(self, other: Any) -> bool:
-        """Compare objects by comparing all attributes."""
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
         return not self.__eq__(other)
 
     def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
 
     @classmethod
     def _create_xml_node(cls):
-        """Create XML node."""
+        """Create XML node.
+ + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ try: xml_map = cls._xml_map # type: ignore except AttributeError: @@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) def as_dict( self, @@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value): If you want XML serialization, you can pass the kwargs is_xml=True. + :param bool keep_readonly: If you want to serialize the readonly attributes :param function key_transformer: A key transformer function. :returns: A dict JSON compatible object :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) @classmethod def _infer_class_models(cls): @@ -395,7 +444,7 @@ def _infer_class_models(cls): client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") - except Exception: + except Exception: # pylint: disable=broad-exception-caught # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. client_models = {cls.__name__: cls} return client_models @@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @@ -426,9 +476,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -448,7 +500,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access return result @classmethod @@ -456,6 +508,11 @@ def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None @@ -501,11 +558,13 @@ def _decode_attribute_map_key(key): inside the received data. 
:param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer(object): # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -560,13 +619,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict :raises: SerializationError if serialization fails. + :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -592,12 +654,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = target_obj._create_xml_node() # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): continue if attr_name == "additional_properties" and attr_desc["key"] == "": @@ -633,7 +697,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -664,17 +729,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) raise SerializationError(msg) from err - else: - return serialized + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict :raises: SerializationError if serialization fails. 
:raises: ValueError if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -703,7 +768,7 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: raise SerializationError("Unable to build a model: " + str(err)) from err @@ -712,9 +777,11 @@ def body(self, data, data_type, **kwargs): def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str + :returns: The serialized URL path :raises: TypeError if serialization fails. :raises: ValueError if data is None """ @@ -728,21 +795,20 @@ def url(self, name, data, data_type, **kwargs): output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. - :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. :rtype: str, list :raises: TypeError if serialization fails. :raises: ValueError if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator @@ -759,19 +825,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -780,21 +847,20 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. 
- :param bool required: Whether it's essential that the data not be - empty or None :raises: AttributeError if required data is None. :raises: ValueError if data is None :raises: SerializationError if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") @@ -805,7 +871,7 @@ def serialize_data(self, data, data_type, **kwargs): if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class @@ -821,11 +887,10 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -841,23 +906,26 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + return eval(data_type)(data) # nosec # pylint: disable=eval-used @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. - :param data: Object to be serialized. + :param str data: Object to be serialized. :rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -871,8 +939,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -882,15 +949,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. 
:rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -945,9 +1010,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. - :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -971,7 +1035,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -979,6 +1043,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. :rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -1003,7 +1068,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1034,56 +1099,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. 
:rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1091,11 +1161,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. :rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1105,30 +1176,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. :rtype: str + :return: serialized duration """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod - def serialize_rfc(attr, **kwargs): + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: TypeError if format invalid. + :return: serialized rfc """ try: if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1141,12 +1214,13 @@ def serialize_rfc(attr, **kwargs): ) @staticmethod - def serialize_iso(attr, **kwargs): + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid. + :return: serialized iso """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) @@ -1172,13 +1246,14 @@ def serialize_iso(attr, **kwargs): raise TypeError(msg) from err @staticmethod - def serialize_unix(attr, **kwargs): + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into IntTime format. This is represented as seconds. :param Datetime attr: Object to be serialized. 
:rtype: int :raises: SerializationError if format invalid + :return: serialied unix """ if isinstance(attr, int): return attr @@ -1186,11 +1261,11 @@ def serialize_unix(attr, **kwargs): if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc -def rest_key_extractor(attr, attr_desc, data): +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument key = attr_desc["key"] working_data = data @@ -1211,7 +1286,9 @@ def rest_key_extractor(attr, attr_desc, data): return working_data.get(key) -def rest_key_case_insensitive_extractor(attr, attr_desc, data): +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): key = attr_desc["key"] working_data = data @@ -1232,17 +1309,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): return attribute_key_case_insensitive_extractor(key, None, working_data) -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1279,7 +1368,7 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1331,22 +1420,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. 
Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1363,7 +1451,7 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") def __init__(self, classes: Optional[Mapping[str, type]] = None): self.deserialize_type = { @@ -1403,11 +1491,12 @@ def __call__(self, target_obj, response_data, content_type=None): :param str content_type: Swagger "produces" if available. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree @@ -1416,12 +1505,13 @@ def _deserialize(self, target_obj, data): :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1440,13 +1530,13 @@ def _deserialize(self, target_obj, data): if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
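For context on how the retained Serializer/Deserializer machinery is consumed: a minimal sketch, not part of this diff, that round-trips an msrest-style model through the relocated azure.storage.blob._serialization module. The Pet model and its field names are hypothetical; Model, _attribute_map, serialize() and deserialize() are the APIs defined in the file above.

    # Hypothetical model for illustration only.
    from azure.storage.blob._serialization import Model

    class Pet(Model):
        # Wire key "petName" maps to the Python attribute "name".
        _attribute_map = {
            "name": {"key": "petName", "type": "str"},
            "age": {"key": "age", "type": "int"},
        }

        def __init__(self, **kwargs):
            super().__init__(**kwargs)
            self.name = kwargs.get("name")
            self.age = kwargs.get("age")

    pet = Pet(name="Rex", age=3)
    wire = pet.serialize()        # -> {'petName': 'Rex', 'age': 3}
    back = Pet.deserialize(wire)  # rebuilt from the RestAPI-shaped dict
    assert back.name == pet.name and back.age == pet.age

The round trip goes through the same rest_key_extractor / _attribute_map plumbing the hunks above annotate.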
@@ -1476,9 +1566,8 @@ def _deserialize(self, target_obj, data): except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1505,6 +1594,8 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple """ if target is None: return None, None @@ -1516,7 +1607,7 @@ def _classify_target(self, target, data): return target, target try: - target = target._classify(data, self.dependencies) # type: ignore + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1531,10 +1622,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1552,10 +1645,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1579,14 +1674,21 @@ def _unpack_content(raw_data, content_type=None): def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
""" if callable(response): subtype = getattr(response, "_subtype_map", {}) try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] + readonly = [ + k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access + ] + const = [ + k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access + ] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: @@ -1596,7 +1698,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1605,15 +1707,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ if data is None: return data @@ -1627,7 +1730,11 @@ def deserialize_data(self, data, data_type): if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) @@ -1647,14 +1754,14 @@ def deserialize_data(self, data, data_type): msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1671,6 +1778,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. :rtype: dict """ if isinstance(attr, list): @@ -1681,11 +1789,12 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. 
+ :return: Deserialized object. :rtype: dict :raises: TypeError if non-builtin datatype encountered. """ @@ -1720,11 +1829,10 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1732,6 +1840,7 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ @@ -1743,24 +1852,23 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, str): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + return eval(data_type)(attr) # nosec # pylint: disable=eval-used @staticmethod def deserialize_unicode(data): @@ -1768,6 +1876,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1781,8 +1890,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1794,6 +1902,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1804,9 +1913,9 @@ def deserialize_enum(data, enum_obj): # Workaround. We might consider remove it in the future. try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1822,6 +1931,7 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. + :return: Deserialized bytearray :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1834,6 +1944,7 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. 
+ :return: Deserialized base64 string :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1849,8 +1960,9 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal + :return: Deserialized decimal :raises: DeserializationError if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text @@ -1865,6 +1977,7 @@ def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. + :return: Deserialized int :rtype: long or int :raises: ValueError if string format invalid. """ @@ -1877,6 +1990,7 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ @@ -1887,14 +2001,14 @@ def deserialize_duration(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise DeserializationError(msg) from err - else: - return duration + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date :raises: DeserializationError if string format invalid. """ @@ -1910,6 +2024,7 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time :raises: DeserializationError if string format invalid. """ @@ -1924,6 +2039,7 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime :raises: DeserializationError if string format invalid. """ @@ -1939,14 +2055,14 @@ def deserialize_rfc(attr): except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime :rtype: Datetime :raises: DeserializationError if string format invalid. """ @@ -1976,8 +2092,7 @@ def deserialize_iso(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_unix(attr): @@ -1985,6 +2100,7 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime :raises: DeserializationError if format invalid """ @@ -1996,5 +2112,4 @@ def deserialize_unix(attr): except ValueError as err: msg = "Cannot deserialize to unix datetime object." 
raise DeserializationError(msg) from err - else: - return date_obj + return date_obj diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py deleted file mode 100644 index 67dc2f9a2aee..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ /dev/null @@ -1,212 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from typing import Any, cast, Dict, Optional, Tuple, Union, TYPE_CHECKING - -try: - from urllib.parse import quote -except ImportError: - from urllib2 import quote # type: ignore - -from azure.core import MatchConditions - -from ._generated.models import ( - ArrowConfiguration, - BlobTag, - BlobTags, - ContainerCpkScopeInfo, - CpkScopeInfo, - DelimitedTextConfiguration, - JsonTextConfiguration, - LeaseAccessConditions, - ModifiedAccessConditions, - QueryFormat, - QueryFormatType, - QuerySerialization, - SourceModifiedAccessConditions -) -from ._models import ContainerEncryptionScope, DelimitedJsonDialect - -if TYPE_CHECKING: - from ._lease import BlobLeaseClient - - -_SUPPORTED_API_VERSIONS = [ - '2019-02-02', - '2019-07-07', - '2019-10-10', - '2019-12-12', - '2020-02-10', - '2020-04-08', - '2020-06-12', - '2020-08-04', - '2020-10-02', - '2020-12-06', - '2021-02-12', - '2021-04-10', - '2021-06-08', - '2021-08-06', - '2021-12-02', - '2022-11-02', - '2023-01-03', - '2023-05-03', - '2023-08-03', - '2023-11-03', - '2024-05-04', - '2024-08-04', - '2024-11-04', -] - - -def _get_match_headers( - kwargs: Dict[str, Any], - match_param: str, - etag_param: str -) -> Tuple[Optional[str], Optional[Any]]: - if_match = None - if_none_match = None - match_condition = kwargs.pop(match_param, None) - if match_condition == MatchConditions.IfNotModified: - if_match = kwargs.pop(etag_param, None) - if not if_match: - raise ValueError(f"'{match_param}' specified without '{etag_param}'.") - elif match_condition == MatchConditions.IfPresent: - if_match = '*' - elif match_condition == MatchConditions.IfModified: - if_none_match = kwargs.pop(etag_param, None) - if not if_none_match: - raise ValueError(f"'{match_param}' specified without '{etag_param}'.") - elif match_condition == MatchConditions.IfMissing: - if_none_match = '*' - elif match_condition is None: - if kwargs.get(etag_param): - raise ValueError(f"'{etag_param}' specified without '{match_param}'.") - else: - raise TypeError(f"Invalid match condition: {match_condition}") - return if_match, if_none_match - - -def get_access_conditions(lease: Optional[Union["BlobLeaseClient", str]]) -> Optional[LeaseAccessConditions]: - try: - lease_id = lease.id # type: ignore - except AttributeError: - lease_id = lease # type: ignore - return LeaseAccessConditions(lease_id=lease_id) if lease_id else None - - -def get_modify_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions: - if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') - return ModifiedAccessConditions( - if_modified_since=kwargs.pop('if_modified_since', None), - if_unmodified_since=kwargs.pop('if_unmodified_since', None), - if_match=if_match or kwargs.pop('if_match', None), - if_none_match=if_none_match or kwargs.pop('if_none_match', None), - 
if_tags=kwargs.pop('if_tags_match_condition', None) - ) - - -def get_source_conditions(kwargs: Dict[str, Any]) -> SourceModifiedAccessConditions: - if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') - return SourceModifiedAccessConditions( - source_if_modified_since=kwargs.pop('source_if_modified_since', None), - source_if_unmodified_since=kwargs.pop('source_if_unmodified_since', None), - source_if_match=if_match or kwargs.pop('source_if_match', None), - source_if_none_match=if_none_match or kwargs.pop('source_if_none_match', None), - source_if_tags=kwargs.pop('source_if_tags_match_condition', None) - ) - - -def get_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[CpkScopeInfo]: - if 'encryption_scope' in kwargs: - return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope')) - return None - - -def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCpkScopeInfo]: - encryption_scope = kwargs.pop('container_encryption_scope', None) - if encryption_scope: - if isinstance(encryption_scope, ContainerEncryptionScope): - return ContainerCpkScopeInfo( - default_encryption_scope=encryption_scope.default_encryption_scope, - prevent_encryption_scope_override=encryption_scope.prevent_encryption_scope_override - ) - if isinstance(encryption_scope, dict): - return ContainerCpkScopeInfo( - default_encryption_scope=encryption_scope['default_encryption_scope'], - prevent_encryption_scope_override=encryption_scope.get('prevent_encryption_scope_override') - ) - raise TypeError("Container encryption scope must be dict or type ContainerEncryptionScope.") - return None - - -def get_api_version(kwargs: Dict[str, Any]) -> str: - api_version = kwargs.get('api_version', None) - if api_version and api_version not in _SUPPORTED_API_VERSIONS: - versions = '\n'.join(_SUPPORTED_API_VERSIONS) - raise ValueError(f"Unsupported API version '{api_version}'. 
Please select from:\n{versions}") - return api_version or _SUPPORTED_API_VERSIONS[-1] - -def get_version_id(self_vid: Optional[str], kwargs: Dict[str, Any]) -> Optional[str]: - if 'version_id' in kwargs: - return cast(str, kwargs.pop('version_id')) - return self_vid - -def serialize_blob_tags_header(tags: Optional[Dict[str, str]] = None) -> Optional[str]: - if tags is None: - return None - - components = [] - if tags: - for key, value in tags.items(): - components.append(quote(key, safe='.-')) - components.append('=') - components.append(quote(value, safe='.-')) - components.append('&') - - if components: - del components[-1] - - return ''.join(components) - - -def serialize_blob_tags(tags: Optional[Dict[str, str]] = None) -> BlobTags: - tag_list = [] - if tags: - tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()] - return BlobTags(blob_tag_set=tag_list) - - -def serialize_query_format(formater: Union[str, DelimitedJsonDialect]) -> Optional[QuerySerialization]: - if formater == "ParquetDialect": - qq_format = QueryFormat(type=QueryFormatType.PARQUET, parquet_text_configuration=' ') #type: ignore [arg-type] - elif isinstance(formater, DelimitedJsonDialect): - json_serialization_settings = JsonTextConfiguration(record_separator=formater.delimiter) - qq_format = QueryFormat(type=QueryFormatType.JSON, json_text_configuration=json_serialization_settings) - elif hasattr(formater, 'quotechar'): # This supports a csv.Dialect as well - try: - headers = formater.has_header # type: ignore - except AttributeError: - headers = False - if isinstance(formater, str): - raise ValueError("Unknown string value provided. Accepted values: ParquetDialect") - csv_serialization_settings = DelimitedTextConfiguration( - column_separator=formater.delimiter, - field_quote=formater.quotechar, - record_separator=formater.lineterminator, - escape_char=formater.escapechar, - headers_present=headers - ) - qq_format = QueryFormat( - type=QueryFormatType.DELIMITED, - delimited_text_configuration=csv_serialization_settings - ) - elif isinstance(formater, list): - arrow_serialization_settings = ArrowConfiguration(schema=formater) - qq_format = QueryFormat(type=QueryFormatType.arrow, arrow_configuration=arrow_serialization_settings) - elif not formater: - return None - else: - raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect or ParquetDialect.") - return QuerySerialization(format=qq_format) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/__init__.py deleted file mode 100644 index a8b1a27d48f9..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -import base64 -import hashlib -import hmac - -try: - from urllib.parse import quote, unquote -except ImportError: - from urllib2 import quote, unquote # type: ignore - - -def url_quote(url): - return quote(url) - - -def url_unquote(url): - return unquote(url) - - -def encode_base64(data): - if isinstance(data, str): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - -def decode_base64_to_bytes(data): - if isinstance(data, str): - data = data.encode('utf-8') - return base64.b64decode(data) - - -def decode_base64_to_text(data): - decoded_bytes = decode_base64_to_bytes(data) - return decoded_bytes.decode('utf-8') - - -def sign_string(key, string_to_sign, key_is_base64=True): - if key_is_base64: - key = decode_base64_to_bytes(key) - else: - if isinstance(key, str): - key = key.encode('utf-8') - if isinstance(string_to_sign, str): - string_to_sign = string_to_sign.encode('utf-8') - signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) - digest = signed_hmac_sha256.digest() - encoded_digest = encode_base64(digest) - return encoded_digest diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/authentication.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/authentication.py deleted file mode 100644 index e4d5ed730846..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/authentication.py +++ /dev/null @@ -1,245 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import logging -import re -from typing import List, Tuple -from urllib.parse import unquote, urlparse -from functools import cmp_to_key - -try: - from yarl import URL -except ImportError: - pass - -try: - from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import -except ImportError: - AioHttpTransport = None - -from azure.core.exceptions import ClientAuthenticationError -from azure.core.pipeline.policies import SansIOHTTPPolicy - -from . 
import sign_string - -logger = logging.getLogger(__name__) - - -table_lv0 = [ - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, 0x723, 0x725, - 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, - 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, - 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, - 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, - 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, - 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, 0x0, 0x750, 0x0, -] - -table_lv4 = [ - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, -] - -def compare(lhs: str, rhs: str) -> int: # pylint:disable=too-many-return-statements - tables = [table_lv0, table_lv4] - curr_level, i, j, n = 0, 0, 0, len(tables) - lhs_len = len(lhs) - rhs_len = len(rhs) - while curr_level < n: - if curr_level == (n - 1) and i != j: - if i > j: - return -1 - if i < j: - return 1 - return 0 - - w1 = tables[curr_level][ord(lhs[i])] if i < lhs_len else 0x1 - w2 = tables[curr_level][ord(rhs[j])] if j < rhs_len else 0x1 - - if w1 == 0x1 and w2 == 0x1: - i = 0 - j = 0 - curr_level += 1 - elif w1 == w2: - i += 1 - j += 1 - elif w1 == 0: - i += 1 - elif w2 == 0: - j += 1 - else: - if w1 < w2: - return -1 - if w1 > w2: - return 1 - return 0 - return 0 - - -# wraps a given exception with the desired exception type -def _wrap_exception(ex, desired_type): - msg = "" - if ex.args: - msg = ex.args[0] - return desired_type(msg) - -# This method attempts to emulate the sorting done by the service -def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]: - - # Build dict of tuples and list of keys - header_dict = {} - header_keys = [] - for k, v in input_headers: - header_dict[k] = v - header_keys.append(k) - - try: - header_keys = sorted(header_keys, key=cmp_to_key(compare)) - except ValueError as exc: - raise ValueError("Illegal character encountered when sorting headers.") from exc - - # Build list of sorted tuples - sorted_headers = [] - for key in header_keys: - sorted_headers.append((key, header_dict.pop(key))) - return sorted_headers - - -class AzureSigningError(ClientAuthenticationError): - """ - Represents a fatal error when attempting to sign a request. - In general, the cause of this exception is user error. For example, the given account key is not valid. - Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info. 
- """ - - -class SharedKeyCredentialPolicy(SansIOHTTPPolicy): - - def __init__(self, account_name, account_key): - self.account_name = account_name - self.account_key = account_key - super(SharedKeyCredentialPolicy, self).__init__() - - @staticmethod - def _get_headers(request, headers_to_sign): - headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value) - if 'content-length' in headers and headers['content-length'] == '0': - del headers['content-length'] - return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n' - - @staticmethod - def _get_verb(request): - return request.http_request.method + '\n' - - def _get_canonicalized_resource(self, request): - uri_path = urlparse(request.http_request.url).path - try: - if isinstance(request.context.transport, AioHttpTransport) or \ - isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or \ - isinstance(getattr(getattr(request.context.transport, "_transport", None), "_transport", None), - AioHttpTransport): - uri_path = URL(uri_path) - return '/' + self.account_name + str(uri_path) - except TypeError: - pass - return '/' + self.account_name + uri_path - - @staticmethod - def _get_canonicalized_headers(request): - string_to_sign = '' - x_ms_headers = [] - for name, value in request.http_request.headers.items(): - if name.startswith('x-ms-'): - x_ms_headers.append((name.lower(), value)) - x_ms_headers = _storage_header_sort(x_ms_headers) - for name, value in x_ms_headers: - if value is not None: - string_to_sign += ''.join([name, ':', value, '\n']) - return string_to_sign - - @staticmethod - def _get_canonicalized_resource_query(request): - sorted_queries = list(request.http_request.query.items()) - sorted_queries.sort() - - string_to_sign = '' - for name, value in sorted_queries: - if value is not None: - string_to_sign += '\n' + name.lower() + ':' + unquote(value) - - return string_to_sign - - def _add_authorization_header(self, request, string_to_sign): - try: - signature = sign_string(self.account_key, string_to_sign) - auth_string = 'SharedKey ' + self.account_name + ':' + signature - request.http_request.headers['Authorization'] = auth_string - except Exception as ex: - # Wrap any error that occurred as signing error - # Doing so will clarify/locate the source of problem - raise _wrap_exception(ex, AzureSigningError) from ex - - def on_request(self, request): - string_to_sign = \ - self._get_verb(request) + \ - self._get_headers( - request, - [ - 'content-encoding', 'content-language', 'content-length', - 'content-md5', 'content-type', 'date', 'if-modified-since', - 'if-match', 'if-none-match', 'if-unmodified-since', 'byte_range' - ] - ) + \ - self._get_canonicalized_headers(request) + \ - self._get_canonicalized_resource(request) + \ - self._get_canonicalized_resource_query(request) - - self._add_authorization_header(request, string_to_sign) - # logger.debug("String_to_sign=%s", string_to_sign) - - -class StorageHttpChallenge(object): - def __init__(self, challenge): - """ Parses an HTTP WWW-Authentication Bearer challenge from the Storage service. 
""" - if not challenge: - raise ValueError("Challenge cannot be empty") - - self._parameters = {} - self.scheme, trimmed_challenge = challenge.strip().split(" ", 1) - - # name=value pairs either comma or space separated with values possibly being - # enclosed in quotes - for item in re.split('[, ]', trimmed_challenge): - comps = item.split("=") - if len(comps) == 2: - key = comps[0].strip(' "') - value = comps[1].strip(' "') - if key: - self._parameters[key] = value - - # Extract and verify required parameters - self.authorization_uri = self._parameters.get('authorization_uri') - if not self.authorization_uri: - raise ValueError("Authorization Uri not found") - - self.resource_id = self._parameters.get('resource_id') - if not self.resource_id: - raise ValueError("Resource id not found") - - uri_path = urlparse(self.authorization_uri).path.lstrip("/") - self.tenant_id = uri_path.split("/")[0] - - def get_value(self, key): - return self._parameters.get(key) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/__init__.py deleted file mode 100644 index 5b396cd202e8..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py deleted file mode 100644 index 3e46f1fb53fe..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py +++ /dev/null @@ -1,435 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-missing-return, docstring-missing-rtype - -"""Input/output utilities. - -Includes: - - i/o-specific constants - - i/o-specific exceptions - - schema validation - - leaf value encoding and decoding - - datum reader/writer stuff (?) - -Also includes a generic representation for data, which uses the -following mapping: - - Schema records are implemented as dict. - - Schema arrays are implemented as list. - - Schema maps are implemented as dict. - - Schema strings are implemented as unicode. - - Schema bytes are implemented as str. - - Schema ints are implemented as int. - - Schema longs are implemented as long. - - Schema floats are implemented as float. - - Schema doubles are implemented as float. - - Schema booleans are implemented as bool. 
-""" - -import json -import logging -import struct -import sys - -from ..avro import schema - -PY3 = sys.version_info[0] == 3 - -logger = logging.getLogger(__name__) - -# ------------------------------------------------------------------------------ -# Constants - -STRUCT_FLOAT = struct.Struct('= 0), n - input_bytes = self.reader.read(n) - if n > 0 and not input_bytes: - raise StopIteration - assert (len(input_bytes) == n), input_bytes - return input_bytes - - @staticmethod - def read_null(): - """ - null is written as zero bytes - """ - return None - - def read_boolean(self): - """ - a boolean is written as a single byte - whose value is either 0 (false) or 1 (true). - """ - b = ord(self.read(1)) - if b == 1: - return True - if b == 0: - return False - fail_msg = f"Invalid value for boolean: {b}" - raise schema.AvroException(fail_msg) - - def read_int(self): - """ - int and long values are written using variable-length, zig-zag coding. - """ - return self.read_long() - - def read_long(self): - """ - int and long values are written using variable-length, zig-zag coding. - """ - b = ord(self.read(1)) - n = b & 0x7F - shift = 7 - while (b & 0x80) != 0: - b = ord(self.read(1)) - n |= (b & 0x7F) << shift - shift += 7 - datum = (n >> 1) ^ -(n & 1) - return datum - - def read_float(self): - """ - A float is written as 4 bytes. - The float is converted into a 32-bit integer using a method equivalent to - Java's floatToIntBits and then encoded in little-endian format. - """ - return STRUCT_FLOAT.unpack(self.read(4))[0] - - def read_double(self): - """ - A double is written as 8 bytes. - The double is converted into a 64-bit integer using a method equivalent to - Java's doubleToLongBits and then encoded in little-endian format. - """ - return STRUCT_DOUBLE.unpack(self.read(8))[0] - - def read_bytes(self): - """ - Bytes are encoded as a long followed by that many bytes of data. - """ - nbytes = self.read_long() - assert (nbytes >= 0), nbytes - return self.read(nbytes) - - def read_utf8(self): - """ - A string is encoded as a long followed by - that many bytes of UTF-8 encoded character data. - """ - input_bytes = self.read_bytes() - if PY3: - try: - return input_bytes.decode('utf-8') - except UnicodeDecodeError as exn: - logger.error('Invalid UTF-8 input bytes: %r', input_bytes) - raise exn - else: - # PY2 - return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable - - def skip_null(self): - pass - - def skip_boolean(self): - self.skip(1) - - def skip_int(self): - self.skip_long() - - def skip_long(self): - b = ord(self.read(1)) - while (b & 0x80) != 0: - b = ord(self.read(1)) - - def skip_float(self): - self.skip(4) - - def skip_double(self): - self.skip(8) - - def skip_bytes(self): - self.skip(self.read_long()) - - def skip_utf8(self): - self.skip_bytes() - - def skip(self, n): - self.reader.seek(self.reader.tell() + n) - - -# ------------------------------------------------------------------------------ -# DatumReader - - -class DatumReader(object): - """Deserialize Avro-encoded data into a Python data structure.""" - - def __init__(self, writer_schema=None): - """ - As defined in the Avro specification, we call the schema encoded - in the data the "writer's schema". 
- """ - self._writer_schema = writer_schema - - # read/write properties - def set_writer_schema(self, writer_schema): - self._writer_schema = writer_schema - - writer_schema = property(lambda self: self._writer_schema, - set_writer_schema) - - def read(self, decoder): - return self.read_data(self.writer_schema, decoder) - - def read_data(self, writer_schema, decoder): - # function dispatch for reading data based on type of writer's schema - if writer_schema.type == 'null': - result = decoder.read_null() - elif writer_schema.type == 'boolean': - result = decoder.read_boolean() - elif writer_schema.type == 'string': - result = decoder.read_utf8() - elif writer_schema.type == 'int': - result = decoder.read_int() - elif writer_schema.type == 'long': - result = decoder.read_long() - elif writer_schema.type == 'float': - result = decoder.read_float() - elif writer_schema.type == 'double': - result = decoder.read_double() - elif writer_schema.type == 'bytes': - result = decoder.read_bytes() - elif writer_schema.type == 'fixed': - result = self.read_fixed(writer_schema, decoder) - elif writer_schema.type == 'enum': - result = self.read_enum(writer_schema, decoder) - elif writer_schema.type == 'array': - result = self.read_array(writer_schema, decoder) - elif writer_schema.type == 'map': - result = self.read_map(writer_schema, decoder) - elif writer_schema.type in ['union', 'error_union']: - result = self.read_union(writer_schema, decoder) - elif writer_schema.type in ['record', 'error', 'request']: - result = self.read_record(writer_schema, decoder) - else: - fail_msg = f"Cannot read unknown schema type: {writer_schema.type}" - raise schema.AvroException(fail_msg) - return result - - def skip_data(self, writer_schema, decoder): - if writer_schema.type == 'null': - result = decoder.skip_null() - elif writer_schema.type == 'boolean': - result = decoder.skip_boolean() - elif writer_schema.type == 'string': - result = decoder.skip_utf8() - elif writer_schema.type == 'int': - result = decoder.skip_int() - elif writer_schema.type == 'long': - result = decoder.skip_long() - elif writer_schema.type == 'float': - result = decoder.skip_float() - elif writer_schema.type == 'double': - result = decoder.skip_double() - elif writer_schema.type == 'bytes': - result = decoder.skip_bytes() - elif writer_schema.type == 'fixed': - result = self.skip_fixed(writer_schema, decoder) - elif writer_schema.type == 'enum': - result = self.skip_enum(decoder) - elif writer_schema.type == 'array': - self.skip_array(writer_schema, decoder) - result = None - elif writer_schema.type == 'map': - self.skip_map(writer_schema, decoder) - result = None - elif writer_schema.type in ['union', 'error_union']: - result = self.skip_union(writer_schema, decoder) - elif writer_schema.type in ['record', 'error', 'request']: - self.skip_record(writer_schema, decoder) - result = None - else: - fail_msg = f"Unknown schema type: {writer_schema.type}" - raise schema.AvroException(fail_msg) - return result - - # Fixed instances are encoded using the number of bytes declared in the schema. - @staticmethod - def read_fixed(writer_schema, decoder): - return decoder.read(writer_schema.size) - - @staticmethod - def skip_fixed(writer_schema, decoder): - return decoder.skip(writer_schema.size) - - # An enum is encoded by a int, representing the zero-based position of the symbol in the schema. 
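Every branch of the dispatcher above ultimately bottoms out in the zig-zag varint routines quoted earlier (read_int/read_long), and the enum reader that follows stores nothing but such an int. As a minimal editorial sketch, not part of the deleted module, here is the round trip, with a hypothetical encoder added for symmetry:

```python
import io

def zigzag_encode(value: int) -> bytes:
    """Hypothetical inverse of BinaryDecoder.read_long (64-bit range)."""
    n = (value << 1) ^ (value >> 63)   # zig-zag maps small magnitudes to small codes
    out = bytearray()
    while (n & ~0x7F) != 0:
        out.append((n & 0x7F) | 0x80)  # low 7 bits, continuation bit set
        n >>= 7
    out.append(n)
    return bytes(out)

def zigzag_decode(stream: io.BytesIO) -> int:
    """Same logic as the read_long shown above, on a plain BytesIO."""
    b = stream.read(1)[0]
    n = b & 0x7F
    shift = 7
    while b & 0x80:
        b = stream.read(1)[0]
        n |= (b & 0x7F) << shift
        shift += 7
    return (n >> 1) ^ -(n & 1)

assert zigzag_encode(0) == b"\x00" and zigzag_encode(-1) == b"\x01"
assert zigzag_decode(io.BytesIO(zigzag_encode(-3))) == -3
```

An enum symbol travels the same way: read_enum below is zigzag_decode plus a bounds check against the writer schema's symbol list.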
- @staticmethod - def read_enum(writer_schema, decoder): - # read data - index_of_symbol = decoder.read_int() - if index_of_symbol >= len(writer_schema.symbols): - fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols" - raise SchemaResolutionException(fail_msg, writer_schema) - read_symbol = writer_schema.symbols[index_of_symbol] - return read_symbol - - @staticmethod - def skip_enum(decoder): - return decoder.skip_int() - - # Arrays are encoded as a series of blocks. - - # Each block consists of a long count value, followed by that many array items. - # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema. - - # If a block's count is negative, then the count is followed immediately by a long block size, - # indicating the number of bytes in the block. - # The actual count in this case is the absolute value of the count written. - def read_array(self, writer_schema, decoder): - read_items = [] - block_count = decoder.read_long() - while block_count != 0: - if block_count < 0: - block_count = -block_count - decoder.read_long() - for _ in range(block_count): - read_items.append(self.read_data(writer_schema.items, decoder)) - block_count = decoder.read_long() - return read_items - - def skip_array(self, writer_schema, decoder): - block_count = decoder.read_long() - while block_count != 0: - if block_count < 0: - block_size = decoder.read_long() - decoder.skip(block_size) - else: - for _ in range(block_count): - self.skip_data(writer_schema.items, decoder) - block_count = decoder.read_long() - - # Maps are encoded as a series of blocks. - - # Each block consists of a long count value, followed by that many key/value pairs. - # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. - - # If a block's count is negative, then the count is followed immediately by a long block size, - # indicating the number of bytes in the block. - # The actual count in this case is the absolute value of the count written. - def read_map(self, writer_schema, decoder): - read_items = {} - block_count = decoder.read_long() - while block_count != 0: - if block_count < 0: - block_count = -block_count - decoder.read_long() - for _ in range(block_count): - key = decoder.read_utf8() - read_items[key] = self.read_data(writer_schema.values, decoder) - block_count = decoder.read_long() - return read_items - - def skip_map(self, writer_schema, decoder): - block_count = decoder.read_long() - while block_count != 0: - if block_count < 0: - block_size = decoder.read_long() - decoder.skip(block_size) - else: - for _ in range(block_count): - decoder.skip_utf8() - self.skip_data(writer_schema.values, decoder) - block_count = decoder.read_long() - - # A union is encoded by first writing a long value indicating - # the zero-based position within the union of the schema of its value. - # The value is then encoded per the indicated schema within the union. 
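The union comment above is the entire wire format: one zig-zag long picking the branch, then the value in that branch's own encoding. A short editorial sketch, standalone and with the decoder logic inlined rather than taken from these classes, hand-assembles a value of the union ["null", "string"] before read_union does the same thing with real schema objects:

```python
import io

# zig-zag(1) == 2 -> b"\x02" selects branch 1 ("string");
# zig-zag(3) == 6 -> b"\x06" is the string length; then the UTF-8 bytes.
encoded = b"\x02" + b"\x06" + b"foo"
buf = io.BytesIO(encoded)

def read_long(s: io.BytesIO) -> int:
    # same varint + zig-zag decode as BinaryDecoder.read_long above
    b = s.read(1)[0]
    n = b & 0x7F
    shift = 7
    while b & 0x80:
        b = s.read(1)[0]
        n |= (b & 0x7F) << shift
        shift += 7
    return (n >> 1) ^ -(n & 1)

assert read_long(buf) == 1                 # branch index -> the "string" schema
assert buf.read(read_long(buf)) == b"foo"  # length-prefixed UTF-8 payload
```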
- def read_union(self, writer_schema, decoder): - # schema resolution - index_of_schema = int(decoder.read_long()) - if index_of_schema >= len(writer_schema.schemas): - fail_msg = (f"Can't access branch index {index_of_schema} " - f"for union with {len(writer_schema.schemas)} branches") - raise SchemaResolutionException(fail_msg, writer_schema) - selected_writer_schema = writer_schema.schemas[index_of_schema] - - # read data - return self.read_data(selected_writer_schema, decoder) - - def skip_union(self, writer_schema, decoder): - index_of_schema = int(decoder.read_long()) - if index_of_schema >= len(writer_schema.schemas): - fail_msg = (f"Can't access branch index {index_of_schema} " - f"for union with {len(writer_schema.schemas)} branches") - raise SchemaResolutionException(fail_msg, writer_schema) - return self.skip_data(writer_schema.schemas[index_of_schema], decoder) - - # A record is encoded by encoding the values of its fields - # in the order that they are declared. In other words, a record - # is encoded as just the concatenation of the encodings of its fields. - # Field values are encoded per their schema. - - # Schema Resolution: - # * the ordering of fields may be different: fields are matched by name. - # * schemas for fields with the same name in both records are resolved - # recursively. - # * if the writer's record contains a field with a name not present in the - # reader's record, the writer's value for that field is ignored. - # * if the reader's record schema has a field that contains a default value, - # and writer's schema does not have a field with the same name, then the - # reader should use the default value from its field. - # * if the reader's record schema has a field with no default value, and - # writer's schema does not have a field with the same name, then the - # field's value is unset. - def read_record(self, writer_schema, decoder): - # schema resolution - read_record = {} - for field in writer_schema.fields: - field_val = self.read_data(field.type, decoder) - read_record[field.name] = field_val - return read_record - - def skip_record(self, writer_schema, decoder): - for field in writer_schema.fields: - self.skip_data(field.type, decoder) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py deleted file mode 100644 index 8688661b5add..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py +++ /dev/null @@ -1,419 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-missing-return, docstring-missing-rtype - -"""Input/output utilities. - -Includes: - - i/o-specific constants - - i/o-specific exceptions - - schema validation - - leaf value encoding and decoding - - datum reader/writer stuff (?) - -Also includes a generic representation for data, which uses the -following mapping: - - Schema records are implemented as dict. - - Schema arrays are implemented as list. - - Schema maps are implemented as dict. - - Schema strings are implemented as unicode. - - Schema bytes are implemented as str. - - Schema ints are implemented as int. - - Schema longs are implemented as long. 
- - Schema floats are implemented as float. - - Schema doubles are implemented as float. - - Schema booleans are implemented as bool. -""" - -import logging -import sys - -from ..avro import schema - -from .avro_io import STRUCT_FLOAT, STRUCT_DOUBLE, SchemaResolutionException - -PY3 = sys.version_info[0] == 3 - -logger = logging.getLogger(__name__) - -# ------------------------------------------------------------------------------ -# Decoder - - -class AsyncBinaryDecoder(object): - """Read leaf values.""" - - def __init__(self, reader): - """ - reader is a Python object on which we can call read, seek, and tell. - """ - self._reader = reader - - @property - def reader(self): - """Reports the reader used by this decoder.""" - return self._reader - - async def read(self, n): - """Read n bytes. - - :param int n: Number of bytes to read. - :returns: The next n bytes from the input. - :rtype: bytes - """ - assert (n >= 0), n - input_bytes = await self.reader.read(n) - if n > 0 and not input_bytes: - raise StopAsyncIteration - assert (len(input_bytes) == n), input_bytes - return input_bytes - - @staticmethod - def read_null(): - """ - null is written as zero bytes - """ - return None - - async def read_boolean(self): - """ - a boolean is written as a single byte - whose value is either 0 (false) or 1 (true). - """ - b = ord(await self.read(1)) - if b == 1: - return True - if b == 0: - return False - fail_msg = f"Invalid value for boolean: {b}" - raise schema.AvroException(fail_msg) - - async def read_int(self): - """ - int and long values are written using variable-length, zig-zag coding. - """ - return await self.read_long() - - async def read_long(self): - """ - int and long values are written using variable-length, zig-zag coding. - """ - b = ord(await self.read(1)) - n = b & 0x7F - shift = 7 - while (b & 0x80) != 0: - b = ord(await self.read(1)) - n |= (b & 0x7F) << shift - shift += 7 - datum = (n >> 1) ^ -(n & 1) - return datum - - async def read_float(self): - """ - A float is written as 4 bytes. - The float is converted into a 32-bit integer using a method equivalent to - Java's floatToIntBits and then encoded in little-endian format. - """ - return STRUCT_FLOAT.unpack(await self.read(4))[0] - - async def read_double(self): - """ - A double is written as 8 bytes. - The double is converted into a 64-bit integer using a method equivalent to - Java's doubleToLongBits and then encoded in little-endian format. - """ - return STRUCT_DOUBLE.unpack(await self.read(8))[0] - - async def read_bytes(self): - """ - Bytes are encoded as a long followed by that many bytes of data. - """ - nbytes = await self.read_long() - assert (nbytes >= 0), nbytes - return await self.read(nbytes) - - async def read_utf8(self): - """ - A string is encoded as a long followed by - that many bytes of UTF-8 encoded character data. 
- """ - input_bytes = await self.read_bytes() - if PY3: - try: - return input_bytes.decode('utf-8') - except UnicodeDecodeError as exn: - logger.error('Invalid UTF-8 input bytes: %r', input_bytes) - raise exn - else: - # PY2 - return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable - - def skip_null(self): - pass - - async def skip_boolean(self): - await self.skip(1) - - async def skip_int(self): - await self.skip_long() - - async def skip_long(self): - b = ord(await self.read(1)) - while (b & 0x80) != 0: - b = ord(await self.read(1)) - - async def skip_float(self): - await self.skip(4) - - async def skip_double(self): - await self.skip(8) - - async def skip_bytes(self): - await self.skip(await self.read_long()) - - async def skip_utf8(self): - await self.skip_bytes() - - async def skip(self, n): - await self.reader.seek(await self.reader.tell() + n) - - -# ------------------------------------------------------------------------------ -# DatumReader - - -class AsyncDatumReader(object): - """Deserialize Avro-encoded data into a Python data structure.""" - - def __init__(self, writer_schema=None): - """ - As defined in the Avro specification, we call the schema encoded - in the data the "writer's schema", and the schema expected by the - reader the "reader's schema". - """ - self._writer_schema = writer_schema - - # read/write properties - def set_writer_schema(self, writer_schema): - self._writer_schema = writer_schema - - writer_schema = property(lambda self: self._writer_schema, - set_writer_schema) - - async def read(self, decoder): - return await self.read_data(self.writer_schema, decoder) - - async def read_data(self, writer_schema, decoder): - # function dispatch for reading data based on type of writer's schema - if writer_schema.type == 'null': - result = decoder.read_null() - elif writer_schema.type == 'boolean': - result = await decoder.read_boolean() - elif writer_schema.type == 'string': - result = await decoder.read_utf8() - elif writer_schema.type == 'int': - result = await decoder.read_int() - elif writer_schema.type == 'long': - result = await decoder.read_long() - elif writer_schema.type == 'float': - result = await decoder.read_float() - elif writer_schema.type == 'double': - result = await decoder.read_double() - elif writer_schema.type == 'bytes': - result = await decoder.read_bytes() - elif writer_schema.type == 'fixed': - result = await self.read_fixed(writer_schema, decoder) - elif writer_schema.type == 'enum': - result = await self.read_enum(writer_schema, decoder) - elif writer_schema.type == 'array': - result = await self.read_array(writer_schema, decoder) - elif writer_schema.type == 'map': - result = await self.read_map(writer_schema, decoder) - elif writer_schema.type in ['union', 'error_union']: - result = await self.read_union(writer_schema, decoder) - elif writer_schema.type in ['record', 'error', 'request']: - result = await self.read_record(writer_schema, decoder) - else: - fail_msg = f"Cannot read unknown schema type: {writer_schema.type}" - raise schema.AvroException(fail_msg) - return result - - async def skip_data(self, writer_schema, decoder): - if writer_schema.type == 'null': - result = decoder.skip_null() - elif writer_schema.type == 'boolean': - result = await decoder.skip_boolean() - elif writer_schema.type == 'string': - result = await decoder.skip_utf8() - elif writer_schema.type == 'int': - result = await decoder.skip_int() - elif writer_schema.type == 'long': - result = await decoder.skip_long() - elif writer_schema.type == 
'float': - result = await decoder.skip_float() - elif writer_schema.type == 'double': - result = await decoder.skip_double() - elif writer_schema.type == 'bytes': - result = await decoder.skip_bytes() - elif writer_schema.type == 'fixed': - result = await self.skip_fixed(writer_schema, decoder) - elif writer_schema.type == 'enum': - result = await self.skip_enum(decoder) - elif writer_schema.type == 'array': - await self.skip_array(writer_schema, decoder) - result = None - elif writer_schema.type == 'map': - await self.skip_map(writer_schema, decoder) - result = None - elif writer_schema.type in ['union', 'error_union']: - result = await self.skip_union(writer_schema, decoder) - elif writer_schema.type in ['record', 'error', 'request']: - await self.skip_record(writer_schema, decoder) - result = None - else: - fail_msg = f"Unknown schema type: {writer_schema.type}" - raise schema.AvroException(fail_msg) - return result - - # Fixed instances are encoded using the number of bytes declared in the schema. - @staticmethod - async def read_fixed(writer_schema, decoder): - return await decoder.read(writer_schema.size) - - @staticmethod - async def skip_fixed(writer_schema, decoder): - return await decoder.skip(writer_schema.size) - - # An enum is encoded by a int, representing the zero-based position of the symbol in the schema. - @staticmethod - async def read_enum(writer_schema, decoder): - # read data - index_of_symbol = await decoder.read_int() - if index_of_symbol >= len(writer_schema.symbols): - fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols" - raise SchemaResolutionException(fail_msg, writer_schema) - read_symbol = writer_schema.symbols[index_of_symbol] - return read_symbol - - @staticmethod - async def skip_enum(decoder): - return await decoder.skip_int() - - # Arrays are encoded as a series of blocks. - - # Each block consists of a long count value, followed by that many array items. - # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema. - - # If a block's count is negative, then the count is followed immediately by a long block size, - # indicating the number of bytes in the block. - # The actual count in this case is the absolute value of the count written. - async def read_array(self, writer_schema, decoder): - read_items = [] - block_count = await decoder.read_long() - while block_count != 0: - if block_count < 0: - block_count = -block_count - await decoder.read_long() - for _ in range(block_count): - read_items.append(await self.read_data(writer_schema.items, decoder)) - block_count = await decoder.read_long() - return read_items - - async def skip_array(self, writer_schema, decoder): - block_count = await decoder.read_long() - while block_count != 0: - if block_count < 0: - block_size = await decoder.read_long() - await decoder.skip(block_size) - else: - for _ in range(block_count): - await self.skip_data(writer_schema.items, decoder) - block_count = await decoder.read_long() - - # Maps are encoded as a series of blocks. - - # Each block consists of a long count value, followed by that many key/value pairs. - # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. - - # If a block's count is negative, then the count is followed immediately by a long block size, - # indicating the number of bytes in the block. - # The actual count in this case is the absolute value of the count written. 
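Since the async map reader below mirrors the sync one line for line, the block framing described in the comments above is worth seeing once on concrete bytes. This editorial sketch is standalone and independent of these classes; it decodes {"a": 1, "b": 2} written as map<string, long>:

```python
import io

def read_long(s: io.BytesIO) -> int:
    # varint + zig-zag decode, as in the read_long methods above
    b = s.read(1)[0]
    n = b & 0x7F
    shift = 7
    while b & 0x80:
        b = s.read(1)[0]
        n |= (b & 0x7F) << shift
        shift += 7
    return (n >> 1) ^ -(n & 1)

# One block of two pairs, then a zero count to terminate
# (zig-zag: 2 -> 0x04, 1 -> 0x02, 0 -> 0x00).
encoded = b"\x04" + b"\x02a" + b"\x02" + b"\x02b" + b"\x04" + b"\x00"

buf, items = io.BytesIO(encoded), {}
block_count = read_long(buf)
while block_count != 0:
    if block_count < 0:            # negative count: a long byte size follows,
        block_count = -block_count
        read_long(buf)             # only useful when skipping the whole block
    for _ in range(block_count):
        key = buf.read(read_long(buf)).decode("utf-8")
        items[key] = read_long(buf)
    block_count = read_long(buf)

assert items == {"a": 1, "b": 2}
```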
- async def read_map(self, writer_schema, decoder): - read_items = {} - block_count = await decoder.read_long() - while block_count != 0: - if block_count < 0: - block_count = -block_count - await decoder.read_long() - for _ in range(block_count): - key = await decoder.read_utf8() - read_items[key] = await self.read_data(writer_schema.values, decoder) - block_count = await decoder.read_long() - return read_items - - async def skip_map(self, writer_schema, decoder): - block_count = await decoder.read_long() - while block_count != 0: - if block_count < 0: - block_size = await decoder.read_long() - await decoder.skip(block_size) - else: - for _ in range(block_count): - await decoder.skip_utf8() - await self.skip_data(writer_schema.values, decoder) - block_count = await decoder.read_long() - - # A union is encoded by first writing a long value indicating - # the zero-based position within the union of the schema of its value. - # The value is then encoded per the indicated schema within the union. - async def read_union(self, writer_schema, decoder): - # schema resolution - index_of_schema = int(await decoder.read_long()) - if index_of_schema >= len(writer_schema.schemas): - fail_msg = (f"Can't access branch index {index_of_schema} " - f"for union with {len(writer_schema.schemas)} branches") - raise SchemaResolutionException(fail_msg, writer_schema) - selected_writer_schema = writer_schema.schemas[index_of_schema] - - # read data - return await self.read_data(selected_writer_schema, decoder) - - async def skip_union(self, writer_schema, decoder): - index_of_schema = int(await decoder.read_long()) - if index_of_schema >= len(writer_schema.schemas): - fail_msg = (f"Can't access branch index {index_of_schema} " - f"for union with {len(writer_schema.schemas)} branches") - raise SchemaResolutionException(fail_msg, writer_schema) - return await self.skip_data(writer_schema.schemas[index_of_schema], decoder) - - # A record is encoded by encoding the values of its fields - # in the order that they are declared. In other words, a record - # is encoded as just the concatenation of the encodings of its fields. - # Field values are encoded per their schema. - - # Schema Resolution: - # * the ordering of fields may be different: fields are matched by name. - # * schemas for fields with the same name in both records are resolved - # recursively. - # * if the writer's record contains a field with a name not present in the - # reader's record, the writer's value for that field is ignored. - # * if the reader's record schema has a field that contains a default value, - # and writer's schema does not have a field with the same name, then the - # reader should use the default value from its field. - # * if the reader's record schema has a field with no default value, and - # writer's schema does not have a field with the same name, then the - # field's value is unset. 
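One caveat on the resolution rules listed above: they are the Avro specification's rules, while this DatumReader/AsyncDatumReader pair only ever decodes with the writer's schema, so defaults and field pruning never actually fire here. As an editorial sketch of the rules themselves, with hypothetical field names and plain dicts standing in for decoded records:

```python
# Writer sent a field the reader doesn't know; reader declares a default
# for a field the writer never wrote (None stands for "no default").
writer_fields = {"name": "ada", "nickname": "al"}  # as decoded from the wire
reader_schema = {"name": None, "age": 36}          # field -> default value

resolved = {}
for field, default in reader_schema.items():
    if field in writer_fields:
        resolved[field] = writer_fields[field]     # matched by name
    elif default is not None:
        resolved[field] = default                  # reader-side default
# "nickname" is dropped: the reader's schema has no such field.

assert resolved == {"name": "ada", "age": 36}
```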
- async def read_record(self, writer_schema, decoder): - # schema resolution - read_record = {} - for field in writer_schema.fields: - field_val = await self.read_data(field.type, decoder) - read_record[field.name] = field_val - return read_record - - async def skip_record(self, writer_schema, decoder): - for field in writer_schema.fields: - await self.skip_data(field.type, decoder) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py deleted file mode 100644 index 757e0329cd07..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py +++ /dev/null @@ -1,257 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-missing-return, docstring-missing-rtype - -"""Read/Write Avro File Object Containers.""" - -import io -import logging -import sys -import zlib - -from ..avro import avro_io -from ..avro import schema - -PY3 = sys.version_info[0] == 3 - -logger = logging.getLogger(__name__) - -# ------------------------------------------------------------------------------ -# Constants - -# Version of the container file: -VERSION = 1 - -if PY3: - MAGIC = b'Obj' + bytes([VERSION]) - MAGIC_SIZE = len(MAGIC) -else: - MAGIC = 'Obj' + chr(VERSION) - MAGIC_SIZE = len(MAGIC) - -# Size of the synchronization marker, in number of bytes: -SYNC_SIZE = 16 - -# Schema of the container header: -META_SCHEMA = schema.parse(""" -{ - "type": "record", "name": "org.apache.avro.file.Header", - "fields": [{ - "name": "magic", - "type": {"type": "fixed", "name": "magic", "size": %(magic_size)d} - }, { - "name": "meta", - "type": {"type": "map", "values": "bytes"} - }, { - "name": "sync", - "type": {"type": "fixed", "name": "sync", "size": %(sync_size)d} - }] -} -""" % { - 'magic_size': MAGIC_SIZE, - 'sync_size': SYNC_SIZE, -}) - -# Codecs supported by container files: -VALID_CODECS = frozenset(['null', 'deflate']) - -# Metadata key associated to the schema: -SCHEMA_KEY = "avro.schema" - - -# ------------------------------------------------------------------------------ -# Exceptions - - -class DataFileException(schema.AvroException): - """Problem reading or writing file object containers.""" - -# ------------------------------------------------------------------------------ - - -class DataFileReader(object): # pylint: disable=too-many-instance-attributes - """Read files written by DataFileWriter.""" - - def __init__(self, reader, datum_reader, **kwargs): - """Initializes a new data file reader. - - Args: - reader: Open file to read from. - datum_reader: Avro datum reader. - """ - self._reader = reader - self._raw_decoder = avro_io.BinaryDecoder(reader) - self._header_reader = kwargs.pop('header_reader', None) - self._header_decoder = None if self._header_reader is None else avro_io.BinaryDecoder(self._header_reader) - self._datum_decoder = None # Maybe reset at every block. - self._datum_reader = datum_reader - - # In case self._reader only has partial content(without header). - # seek(0, 0) to make sure read the (partial)content from beginning. 
- self._reader.seek(0, 0) - - # read the header: magic, meta, sync - self._read_header() - - # ensure codec is valid - avro_codec_raw = self.get_meta('avro.codec') - if avro_codec_raw is None: - self.codec = "null" - else: - self.codec = avro_codec_raw.decode('utf-8') - if self.codec not in VALID_CODECS: - raise DataFileException(f"Unknown codec: {self.codec}.") - - # get ready to read - self._block_count = 0 - - # object_position is to support reading from current position in the future read, - # no need to downloading from the beginning of avro. - if hasattr(self._reader, 'object_position'): - self.reader.track_object_position() - - self._cur_object_index = 0 - # header_reader indicates reader only has partial content. The reader doesn't have block header, - # so we read use the block count stored last time. - # Also ChangeFeed only has codec==null, so use _raw_decoder is good. - if self._header_reader is not None: - self._datum_decoder = self._raw_decoder - - self.datum_reader.writer_schema = ( - schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) - - def __enter__(self): - return self - - def __exit__(self, data_type, value, traceback): - # Perform a close if there's no exception - if data_type is None: - self.close() - - def __iter__(self): - return self - - # read-only properties - @property - def reader(self): - return self._reader - - @property - def raw_decoder(self): - return self._raw_decoder - - @property - def datum_decoder(self): - return self._datum_decoder - - @property - def datum_reader(self): - return self._datum_reader - - @property - def sync_marker(self): - return self._sync_marker - - @property - def meta(self): - return self._meta - - # read/write properties - @property - def block_count(self): - return self._block_count - - def get_meta(self, key): - """Reports the value of a given metadata key. - - :param str key: Metadata key to report the value of. - :returns: Value associated to the metadata key, as bytes. - :rtype: bytes - """ - return self._meta.get(key) - - def _read_header(self): - header_reader = self._header_reader if self._header_reader else self._reader - header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder - - # seek to the beginning of the file to get magic block - header_reader.seek(0, 0) - - # read header into a dict - header = self.datum_reader.read_data(META_SCHEMA, header_decoder) - - # check magic number - if header.get('magic') != MAGIC: - fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." - raise schema.AvroException(fail_msg) - - # set metadata - self._meta = header['meta'] - - # set sync marker - self._sync_marker = header['sync'] - - def _read_block_header(self): - self._block_count = self.raw_decoder.read_long() - if self.codec == "null": - # Skip a long; we don't need to use the length. - self.raw_decoder.skip_long() - self._datum_decoder = self._raw_decoder - elif self.codec == 'deflate': - # Compressed data is stored as (length, data), which - # corresponds to how the "bytes" type is encoded. - data = self.raw_decoder.read_bytes() - # -15 is the log of the window size; negative indicates - # "raw" (no zlib headers) decompression. See zlib.h. - uncompressed = zlib.decompress(data, -15) - self._datum_decoder = avro_io.BinaryDecoder(io.BytesIO(uncompressed)) - else: - raise DataFileException(f"Unknown codec: {self.codec!r}") - - def _skip_sync(self): - """ - Read the length of the sync marker; if it matches the sync marker, - return True. 
Otherwise, seek back to where we started and return False. - """ - proposed_sync_marker = self.reader.read(SYNC_SIZE) - if SYNC_SIZE > 0 and not proposed_sync_marker: - raise StopIteration - if proposed_sync_marker != self.sync_marker: - self.reader.seek(-SYNC_SIZE, 1) - - def __next__(self): - """Return the next datum in the file.""" - if self.block_count == 0: - self._skip_sync() - - # object_position is to support reading from current position in the future read, - # no need to downloading from the beginning of avro file with this attr. - if hasattr(self._reader, 'object_position'): - self.reader.track_object_position() - self._cur_object_index = 0 - - self._read_block_header() - - datum = self.datum_reader.read(self.datum_decoder) - self._block_count -= 1 - self._cur_object_index += 1 - - # object_position is to support reading from current position in the future read, - # This will track the index of the next item to be read. - # This will also track the offset before the next sync marker. - if hasattr(self._reader, 'object_position'): - if self.block_count == 0: - # the next event to be read is at index 0 in the new chunk of blocks, - self.reader.track_object_position() - self.reader.set_object_index(0) - else: - self.reader.set_object_index(self._cur_object_index) - - return datum - - def close(self): - """Close this reader.""" - self.reader.close() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py deleted file mode 100644 index 85dc5cb582b3..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py +++ /dev/null @@ -1,210 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-missing-return, docstring-missing-rtype - -"""Read/Write Avro File Object Containers.""" - -import logging -import sys - -from ..avro import avro_io_async -from ..avro import schema -from .datafile import DataFileException -from .datafile import MAGIC, SYNC_SIZE, META_SCHEMA, SCHEMA_KEY - - -PY3 = sys.version_info[0] == 3 - -logger = logging.getLogger(__name__) - -# ------------------------------------------------------------------------------ -# Constants - -# Codecs supported by container files: -VALID_CODECS = frozenset(['null']) - - -class AsyncDataFileReader(object): # pylint: disable=too-many-instance-attributes - """Read files written by DataFileWriter.""" - - def __init__(self, reader, datum_reader, **kwargs): - """Initializes a new data file reader. - - Args: - reader: Open file to read from. - datum_reader: Avro datum reader. - """ - self._reader = reader - self._raw_decoder = avro_io_async.AsyncBinaryDecoder(reader) - self._header_reader = kwargs.pop('header_reader', None) - self._header_decoder = None if self._header_reader is None else \ - avro_io_async.AsyncBinaryDecoder(self._header_reader) - self._datum_decoder = None # Maybe reset at every block. - self._datum_reader = datum_reader - self.codec = "null" - self._block_count = 0 - self._cur_object_index = 0 - self._meta = None - self._sync_marker = None - - async def init(self): - # In case self._reader only has partial content(without header). 
- # seek(0, 0) to make sure read the (partial)content from beginning. - await self._reader.seek(0, 0) - - # read the header: magic, meta, sync - await self._read_header() - - # ensure codec is valid - avro_codec_raw = self.get_meta('avro.codec') - if avro_codec_raw is None: - self.codec = "null" - else: - self.codec = avro_codec_raw.decode('utf-8') - if self.codec not in VALID_CODECS: - raise DataFileException(f"Unknown codec: {self.codec}.") - - # get ready to read - self._block_count = 0 - - # object_position is to support reading from current position in the future read, - # no need to downloading from the beginning of avro. - if hasattr(self._reader, 'object_position'): - self.reader.track_object_position() - - # header_reader indicates reader only has partial content. The reader doesn't have block header, - # so we read use the block count stored last time. - # Also ChangeFeed only has codec==null, so use _raw_decoder is good. - if self._header_reader is not None: - self._datum_decoder = self._raw_decoder - self.datum_reader.writer_schema = ( - schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) - return self - - async def __aenter__(self): - return self - - async def __aexit__(self, data_type, value, traceback): - # Perform a close if there's no exception - if data_type is None: - self.close() - - def __aiter__(self): - return self - - # read-only properties - @property - def reader(self): - return self._reader - - @property - def raw_decoder(self): - return self._raw_decoder - - @property - def datum_decoder(self): - return self._datum_decoder - - @property - def datum_reader(self): - return self._datum_reader - - @property - def sync_marker(self): - return self._sync_marker - - @property - def meta(self): - return self._meta - - # read/write properties - @property - def block_count(self): - return self._block_count - - def get_meta(self, key): - """Reports the value of a given metadata key. - - :param str key: Metadata key to report the value of. - :returns: Value associated to the metadata key, as bytes. - :rtype: bytes - """ - return self._meta.get(key) - - async def _read_header(self): - header_reader = self._header_reader if self._header_reader else self._reader - header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder - - # seek to the beginning of the file to get magic block - await header_reader.seek(0, 0) - - # read header into a dict - header = await self.datum_reader.read_data(META_SCHEMA, header_decoder) - - # check magic number - if header.get('magic') != MAGIC: - fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." - raise schema.AvroException(fail_msg) - - # set metadata - self._meta = header['meta'] - - # set sync marker - self._sync_marker = header['sync'] - - async def _read_block_header(self): - self._block_count = await self.raw_decoder.read_long() - if self.codec == "null": - # Skip a long; we don't need to use the length. - await self.raw_decoder.skip_long() - self._datum_decoder = self._raw_decoder - else: - raise DataFileException(f"Unknown codec: {self.codec!r}") - - async def _skip_sync(self): - """ - Read the length of the sync marker; if it matches the sync marker, - return True. Otherwise, seek back to where we started and return False. 
- """ - proposed_sync_marker = await self.reader.read(SYNC_SIZE) - if SYNC_SIZE > 0 and not proposed_sync_marker: - raise StopAsyncIteration - if proposed_sync_marker != self.sync_marker: - await self.reader.seek(-SYNC_SIZE, 1) - - async def __anext__(self): - """Return the next datum in the file.""" - if self.block_count == 0: - await self._skip_sync() - - # object_position is to support reading from current position in the future read, - # no need to downloading from the beginning of avro file with this attr. - if hasattr(self._reader, 'object_position'): - await self.reader.track_object_position() - self._cur_object_index = 0 - - await self._read_block_header() - - datum = await self.datum_reader.read(self.datum_decoder) - self._block_count -= 1 - self._cur_object_index += 1 - - # object_position is to support reading from current position in the future read, - # This will track the index of the next item to be read. - # This will also track the offset before the next sync marker. - if hasattr(self._reader, 'object_position'): - if self.block_count == 0: - # the next event to be read is at index 0 in the new chunk of blocks, - await self.reader.track_object_position() - await self.reader.set_object_index(0) - else: - await self.reader.set_object_index(self._cur_object_index) - - return datum - - def close(self): - """Close this reader.""" - self.reader.close() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py deleted file mode 100644 index b20db6020c35..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py +++ /dev/null @@ -1,1177 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-missing-return, docstring-missing-rtype, too-many-lines - -"""Representation of Avro schemas. - -A schema may be one of: - - A record, mapping field names to field value data; - - An error, equivalent to a record; - - An enum, containing one of a small set of symbols; - - An array of values, all of the same schema; - - A map containing string/value pairs, each of a declared schema; - - A union of other schemas; - - A fixed sized binary object; - - A unicode string; - - A sequence of bytes; - - A 32-bit signed int; - - A 64-bit signed long; - - A 32-bit floating-point float; - - A 64-bit floating-point double; - - A boolean; - - Null. -""" - -import abc -import json -import logging -import re -logger = logging.getLogger(__name__) - -# ------------------------------------------------------------------------------ -# Constants - -# Log level more verbose than DEBUG=10, INFO=20, etc. 
-DEBUG_VERBOSE = 5 - -NULL = 'null' -BOOLEAN = 'boolean' -STRING = 'string' -BYTES = 'bytes' -INT = 'int' -LONG = 'long' -FLOAT = 'float' -DOUBLE = 'double' -FIXED = 'fixed' -ENUM = 'enum' -RECORD = 'record' -ERROR = 'error' -ARRAY = 'array' -MAP = 'map' -UNION = 'union' - -# Request and error unions are part of Avro protocols: -REQUEST = 'request' -ERROR_UNION = 'error_union' - -PRIMITIVE_TYPES = frozenset([ - NULL, - BOOLEAN, - STRING, - BYTES, - INT, - LONG, - FLOAT, - DOUBLE, -]) - -NAMED_TYPES = frozenset([ - FIXED, - ENUM, - RECORD, - ERROR, -]) - -VALID_TYPES = frozenset.union( - PRIMITIVE_TYPES, - NAMED_TYPES, - [ - ARRAY, - MAP, - UNION, - REQUEST, - ERROR_UNION, - ], -) - -SCHEMA_RESERVED_PROPS = frozenset([ - 'type', - 'name', - 'namespace', - 'fields', # Record - 'items', # Array - 'size', # Fixed - 'symbols', # Enum - 'values', # Map - 'doc', -]) - -FIELD_RESERVED_PROPS = frozenset([ - 'default', - 'name', - 'doc', - 'order', - 'type', -]) - -VALID_FIELD_SORT_ORDERS = frozenset([ - 'ascending', - 'descending', - 'ignore', -]) - - -# ------------------------------------------------------------------------------ -# Exceptions - - -class Error(Exception): - """Base class for errors in this module.""" - - -class AvroException(Error): - """Generic Avro schema error.""" - - -class SchemaParseException(AvroException): - """Error while parsing a JSON schema descriptor.""" - - -class Schema(metaclass=abc.ABCMeta): - """Abstract base class for all Schema classes.""" - - def __init__(self, data_type, other_props=None): - """Initializes a new schema object. - - Args: - data_type: Type of the schema to initialize. - other_props: Optional dictionary of additional properties. - """ - if data_type not in VALID_TYPES: - raise SchemaParseException(f'{data_type!r} is not a valid Avro type.') - - # All properties of this schema, as a map: property name -> property value - self._props = {} - - self._props['type'] = data_type - self._type = data_type - - if other_props: - self._props.update(other_props) - - @property - def namespace(self): - """Returns: the namespace this schema belongs to, if any, or None.""" - return self._props.get('namespace', None) - - @property - def type(self): - """Returns: the type of this schema.""" - return self._type - - @property - def doc(self): - """Returns: the documentation associated to this schema, if any, or None.""" - return self._props.get('doc', None) - - @property - def props(self): - """Reports all the properties of this schema. - - Includes all properties, reserved and non reserved. - JSON properties of this schema are directly generated from this dict. - - Returns: - A dictionary of properties associated to this schema. - """ - return self._props - - @property - def other_props(self): - """Returns: the dictionary of non-reserved properties.""" - return dict(filter_keys_out(items=self._props, keys=SCHEMA_RESERVED_PROPS)) - - def __str__(self): - """Returns: the JSON representation of this schema.""" - return json.dumps(self.to_json(names=None)) - - # Converts the schema object into its AVRO specification representation. - - # Schema types that have names (records, enums, and fixed) must be aware of not - # re-defining schemas that are already listed in the parameter names. - @abc.abstractmethod - def to_json(self, names): - ... 
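Schema.other_props above leans on filter_keys_out, which is defined further down schema.py and therefore outside this hunk. Assuming it simply drops reserved keys, which is what every call site here implies, the reserved/custom property split behaves like this editorial sketch:

```python
# Copied from the constants above; 'crc' below is a made-up custom property.
SCHEMA_RESERVED_PROPS = frozenset([
    'type', 'name', 'namespace', 'fields',
    'items', 'size', 'symbols', 'values', 'doc',
])

def filter_keys_out(items, keys):
    """Assumed behavior: yield (key, value) pairs whose key is not reserved."""
    for key, value in items.items():
        if key not in keys:
            yield key, value

props = {'type': 'fixed', 'name': 'md5', 'size': 16, 'crc': True}
assert dict(filter_keys_out(items=props, keys=SCHEMA_RESERVED_PROPS)) == {'crc': True}
```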
- - -# ------------------------------------------------------------------------------ - - -_RE_NAME = re.compile(r'[A-Za-z_][A-Za-z0-9_]*') - -_RE_FULL_NAME = re.compile( - r'^' - r'[.]?(?:[A-Za-z_][A-Za-z0-9_]*[.])*' # optional namespace - r'([A-Za-z_][A-Za-z0-9_]*)' # name - r'$' -) - - -class Name(object): - """Representation of an Avro name.""" - - def __init__(self, name, namespace=None): - """Parses an Avro name. - - Args: - name: Avro name to parse (relative or absolute). - namespace: Optional explicit namespace if the name is relative. - """ - # Normalize: namespace is always defined as a string, possibly empty. - if namespace is None: - namespace = '' - - if '.' in name: - # name is absolute, namespace is ignored: - self._fullname = name - - match = _RE_FULL_NAME.match(self._fullname) - if match is None: - raise SchemaParseException( - f'Invalid absolute schema name: {self._fullname!r}.') - - self._name = match.group(1) - self._namespace = self._fullname[:-(len(self._name) + 1)] - - else: - # name is relative, combine with explicit namespace: - self._name = name - self._namespace = namespace - self._fullname = (self._name - if (not self._namespace) else - f'{self._namespace}.{self._name}') - - # Validate the fullname: - if _RE_FULL_NAME.match(self._fullname) is None: - raise SchemaParseException(f"Invalid schema name {self._fullname!r} inferred from " - f"name {self._name!r} and namespace {self._namespace!r}.") - - def __eq__(self, other): - if not isinstance(other, Name): - return NotImplemented - return self.fullname == other.fullname - - @property - def simple_name(self): - """Returns: the simple name part of this name.""" - return self._name - - @property - def namespace(self): - """Returns: this name's namespace, possible the empty string.""" - return self._namespace - - @property - def fullname(self): - """Returns: the full name.""" - return self._fullname - - -# ------------------------------------------------------------------------------ - - -class Names(object): - """Tracks Avro named schemas and default namespace during parsing.""" - - def __init__(self, default_namespace=None, names=None): - """Initializes a new name tracker. - - Args: - default_namespace: Optional default namespace. - names: Optional initial mapping of known named schemas. - """ - if names is None: - names = {} - self._names = names - self._default_namespace = default_namespace - - @property - def names(self): - """Returns: the mapping of known named schemas.""" - return self._names - - @property - def default_namespace(self): - """Returns: the default namespace, if any, or None.""" - return self._default_namespace - - def new_with_default_namespace(self, namespace): - """Creates a new name tracker from this tracker, but with a new default ns. - - :param Any namespace: New default namespace to use. - :returns: New name tracker with the specified default namespace. - :rtype: Names - """ - return Names(names=self._names, default_namespace=namespace) - - def get_name(self, name, namespace=None): - """Resolves the Avro name according to this name tracker's state. - - :param Any name: Name to resolve (absolute or relative). - :param Optional[Any] namespace: Optional explicit namespace. - :returns: The specified name, resolved according to this tracker. - :rtype: Name - """ - if namespace is None: - namespace = self._default_namespace - return Name(name=name, namespace=namespace) - - def get_schema(self, name, namespace=None): - """Resolves an Avro schema by name. 
- - :param Any name: Name (absolute or relative) of the Avro schema to look up. - :param Optional[Any] namespace: Optional explicit namespace. - :returns: The schema with the specified name, if any, or None - :rtype: Union[Any, None] - """ - avro_name = self.get_name(name=name, namespace=namespace) - return self._names.get(avro_name.fullname, None) - - # Given a properties, return properties with namespace removed if it matches the own default namespace - def prune_namespace(self, properties): - if self.default_namespace is None: - # I have no default -- no change - return properties - if 'namespace' not in properties: - # he has no namespace - no change - return properties - if properties['namespace'] != self.default_namespace: - # we're different - leave his stuff alone - return properties - # we each have a namespace and it's redundant. delete his. - prunable = properties.copy() - del prunable['namespace'] - return prunable - - def register(self, schema): - """Registers a new named schema in this tracker. - - :param Any schema: Named Avro schema to register in this tracker. - """ - if schema.fullname in VALID_TYPES: - raise SchemaParseException( - f'{schema.fullname} is a reserved type name.') - if schema.fullname in self.names: - raise SchemaParseException( - f'Avro name {schema.fullname!r} already exists.') - - logger.log(DEBUG_VERBOSE, 'Register new name for %r', schema.fullname) - self._names[schema.fullname] = schema - - -# ------------------------------------------------------------------------------ - - -class NamedSchema(Schema): - """Abstract base class for named schemas. - - Named schemas are enumerated in NAMED_TYPES. - """ - - def __init__( - self, - data_type, - name=None, - namespace=None, - names=None, - other_props=None, - ): - """Initializes a new named schema object. - - Args: - data_type: Type of the named schema. - name: Name (absolute or relative) of the schema. - namespace: Optional explicit namespace if name is relative. - names: Tracker to resolve and register Avro names. - other_props: Optional map of additional properties of the schema. - """ - assert (data_type in NAMED_TYPES), (f'Invalid named type: {data_type!r}') - self._avro_name = names.get_name(name=name, namespace=namespace) - - super(NamedSchema, self).__init__(data_type, other_props) - - names.register(self) - - self._props['name'] = self.name - if self.namespace: - self._props['namespace'] = self.namespace - - @property - def avro_name(self): - """Returns: the Name object describing this schema's name.""" - return self._avro_name - - @property - def name(self): - return self._avro_name.simple_name - - @property - def namespace(self): - return self._avro_name.namespace - - @property - def fullname(self): - return self._avro_name.fullname - - def name_ref(self, names): - """Reports this schema name relative to the specified name tracker. - - :param Any names: Avro name tracker to relativize this schema name against. - :returns: This schema name, relativized against the specified name tracker. - :rtype: Any - """ - if self.namespace == names.default_namespace: - return self.name - return self.fullname - - # Converts the schema object into its AVRO specification representation. - - # Schema types that have names (records, enums, and fixed) must be aware - # of not re-defining schemas that are already listed in the parameter names. - @abc.abstractmethod - def to_json(self, names): - ... 
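The Name machinery above is easiest to verify against the regex it is built on. This editorial snippet copies _RE_FULL_NAME verbatim and splits an absolute name exactly the way Name.__init__ does:

```python
import re

# Same pattern as _RE_FULL_NAME in the deleted module above.
_RE_FULL_NAME = re.compile(
    r'^'
    r'[.]?(?:[A-Za-z_][A-Za-z0-9_]*[.])*'  # optional namespace
    r'([A-Za-z_][A-Za-z0-9_]*)'            # simple name
    r'$'
)

fullname = 'com.example.User'
match = _RE_FULL_NAME.match(fullname)
simple_name = match.group(1)
namespace = fullname[:-(len(simple_name) + 1)]  # strip ".User" from the tail
assert (simple_name, namespace) == ('User', 'com.example')
```

A relative name takes the other path: with no dot present, Name joins the explicit or default namespace to the simple name, so Name('User', namespace='com.example') yields the same fullname.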
- -# ------------------------------------------------------------------------------ - - -_NO_DEFAULT = object() - - -class Field(object): - """Representation of the schema of a field in a record.""" - - def __init__( - self, - data_type, - name, - index, - has_default, - default=_NO_DEFAULT, - order=None, - doc=None, - other_props=None - ): - """Initializes a new Field object. - - Args: - data_type: Avro schema of the field. - name: Name of the field. - index: 0-based position of the field. - has_default: - default: - order: - doc: - other_props: - """ - if (not isinstance(name, str)) or (not name): - raise SchemaParseException(f'Invalid record field name: {name!r}.') - if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS): - raise SchemaParseException(f'Invalid record field order: {order!r}.') - - # All properties of this record field: - self._props = {} - - self._has_default = has_default - if other_props: - self._props.update(other_props) - - self._index = index - self._type = self._props['type'] = data_type - self._name = self._props['name'] = name - - if has_default: - self._props['default'] = default - - if order is not None: - self._props['order'] = order - - if doc is not None: - self._props['doc'] = doc - - @property - def type(self): - """Returns: the schema of this field.""" - return self._type - - @property - def name(self): - """Returns: this field name.""" - return self._name - - @property - def index(self): - """Returns: the 0-based index of this field in the record.""" - return self._index - - @property - def default(self): - return self._props['default'] - - @property - def has_default(self): - return self._has_default - - @property - def order(self): - return self._props.get('order', None) - - @property - def doc(self): - return self._props.get('doc', None) - - @property - def props(self): - return self._props - - @property - def other_props(self): - return filter_keys_out(items=self._props, keys=FIELD_RESERVED_PROPS) - - def __str__(self): - return json.dumps(self.to_json()) - - def to_json(self, names=None): - if names is None: - names = Names() - to_dump = self.props.copy() - to_dump['type'] = self.type.to_json(names) - return to_dump - - def __eq__(self, that): - to_cmp = json.loads(str(self)) - return to_cmp == json.loads(str(that)) - - -# ------------------------------------------------------------------------------ -# Primitive Types - - -class PrimitiveSchema(Schema): - """Schema of a primitive Avro type. - - Valid primitive types are defined in PRIMITIVE_TYPES. - """ - - def __init__(self, data_type, other_props=None): - """Initializes a new schema object for the specified primitive type. - - Args: - data_type: Type of the schema to construct. Must be primitive. - """ - if data_type not in PRIMITIVE_TYPES: - raise AvroException(f'{data_type!r} is not a valid primitive type.') - super(PrimitiveSchema, self).__init__(data_type, other_props=other_props) - - @property - def name(self): - """Returns: the simple name of this schema.""" - # The name of a primitive type is the type itself. - return self.type - - @property - def fullname(self): - """Returns: the fully qualified name of this schema.""" - # The full name is the simple name for primitive schema. 
- return self.name - - def to_json(self, names=None): - if len(self.props) == 1: - return self.fullname - return self.props - - def __eq__(self, that): - return self.props == that.props - - -# ------------------------------------------------------------------------------ -# Complex Types (non-recursive) - - -class FixedSchema(NamedSchema): - def __init__( - self, - name, - namespace, - size, - names=None, - other_props=None, - ): - # Ensure valid ctor args - if not isinstance(size, int): - fail_msg = 'Fixed Schema requires a valid integer for size property.' - raise AvroException(fail_msg) - - super(FixedSchema, self).__init__( - data_type=FIXED, - name=name, - namespace=namespace, - names=names, - other_props=other_props, - ) - self._props['size'] = size - - @property - def size(self): - """Returns: the size of this fixed schema, in bytes.""" - return self._props['size'] - - def to_json(self, names=None): - if names is None: - names = Names() - if self.fullname in names.names: - return self.name_ref(names) - names.names[self.fullname] = self - return names.prune_namespace(self.props) - - def __eq__(self, that): - return self.props == that.props - - -# ------------------------------------------------------------------------------ - - -class EnumSchema(NamedSchema): - def __init__( - self, - name, - namespace, - symbols, - names=None, - doc=None, - other_props=None, - ): - """Initializes a new enumeration schema object. - - Args: - name: Simple name of this enumeration. - namespace: Optional namespace. - symbols: Ordered list of symbols defined in this enumeration. - names: - doc: - other_props: - """ - symbols = tuple(symbols) - symbol_set = frozenset(symbols) - if (len(symbol_set) != len(symbols) - or not all(map(lambda symbol: isinstance(symbol, str), symbols))): - raise AvroException( - f'Invalid symbols for enum schema: {symbols!r}.') - - super(EnumSchema, self).__init__( - data_type=ENUM, - name=name, - namespace=namespace, - names=names, - other_props=other_props, - ) - - self._props['symbols'] = symbols - if doc is not None: - self._props['doc'] = doc - - @property - def symbols(self): - """Returns: the symbols defined in this enum.""" - return self._props['symbols'] - - def to_json(self, names=None): - if names is None: - names = Names() - if self.fullname in names.names: - return self.name_ref(names) - names.names[self.fullname] = self - return names.prune_namespace(self.props) - - def __eq__(self, that): - return self.props == that.props - - -# ------------------------------------------------------------------------------ -# Complex Types (recursive) - - -class ArraySchema(Schema): - """Schema of an array.""" - - def __init__(self, items, other_props=None): - """Initializes a new array schema object. - - Args: - items: Avro schema of the array items. 
- other_props: - """ - super(ArraySchema, self).__init__( - data_type=ARRAY, - other_props=other_props, - ) - self._items_schema = items - self._props['items'] = items - - @property - def items(self): - """Returns: the schema of the items in this array.""" - return self._items_schema - - def to_json(self, names=None): - if names is None: - names = Names() - to_dump = self.props.copy() - item_schema = self.items - to_dump['items'] = item_schema.to_json(names) - return to_dump - - def __eq__(self, that): - to_cmp = json.loads(str(self)) - return to_cmp == json.loads(str(that)) - - -# ------------------------------------------------------------------------------ - - -class MapSchema(Schema): - """Schema of a map.""" - - def __init__(self, values, other_props=None): - """Initializes a new map schema object. - - Args: - values: Avro schema of the map values. - other_props: - """ - super(MapSchema, self).__init__( - data_type=MAP, - other_props=other_props, - ) - self._values_schema = values - self._props['values'] = values - - @property - def values(self): - """Returns: the schema of the values in this map.""" - return self._values_schema - - def to_json(self, names=None): - if names is None: - names = Names() - to_dump = self.props.copy() - to_dump['values'] = self.values.to_json(names) - return to_dump - - def __eq__(self, that): - to_cmp = json.loads(str(self)) - return to_cmp == json.loads(str(that)) - - -# ------------------------------------------------------------------------------ - - -class UnionSchema(Schema): - """Schema of a union.""" - - def __init__(self, schemas): - """Initializes a new union schema object. - - Args: - schemas: Ordered collection of schema branches in the union. - """ - super(UnionSchema, self).__init__(data_type=UNION) - self._schemas = tuple(schemas) - - # Validate the schema branches: - - # All named schema names are unique: - named_branches = tuple( - filter(lambda schema: schema.type in NAMED_TYPES, self._schemas)) - unique_names = frozenset(map(lambda schema: schema.fullname, named_branches)) - if len(unique_names) != len(named_branches): - schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) - raise AvroException(f'Invalid union branches with duplicate schema name:{schemas}') - - # Types are unique within unnamed schemas, and union is not allowed: - unnamed_branches = tuple( - filter(lambda schema: schema.type not in NAMED_TYPES, self._schemas)) - unique_types = frozenset(map(lambda schema: schema.type, unnamed_branches)) - if UNION in unique_types: - schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) - raise AvroException(f'Invalid union branches contain other unions:{schemas}') - if len(unique_types) != len(unnamed_branches): - schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) - raise AvroException(f'Invalid union branches with duplicate type:{schemas}') - - @property - def schemas(self): - """Returns: the ordered list of schema branches in the union.""" - return self._schemas - - def to_json(self, names=None): - if names is None: - names = Names() - to_dump = [] - for schema in self.schemas: - to_dump.append(schema.to_json(names)) - return to_dump - - def __eq__(self, that): - to_cmp = json.loads(str(self)) - return to_cmp == json.loads(str(that)) - - -# ------------------------------------------------------------------------------ - - -class ErrorUnionSchema(UnionSchema): - """Schema representing the declared errors of a protocol message.""" - - def __init__(self, schemas): - 
"""Initializes an error-union schema. - - Args: - schema: collection of error schema. - """ - # Prepend "string" to handle system errors - schemas = [PrimitiveSchema(data_type=STRING)] + list(schemas) - super(ErrorUnionSchema, self).__init__(schemas=schemas) - - def to_json(self, names=None): - if names is None: - names = Names() - to_dump = [] - for schema in self.schemas: - # Don't print the system error schema - if schema.type == STRING: - continue - to_dump.append(schema.to_json(names)) - return to_dump - - -# ------------------------------------------------------------------------------ - - -class RecordSchema(NamedSchema): - """Schema of a record.""" - - @staticmethod - def _make_field(index, field_desc, names): - """Builds field schemas from a list of field JSON descriptors. - - :param int index: 0-based index of the field in the record. - :param Any field_desc: JSON descriptors of a record field. - :param Any names: The names for this schema. - :returns: The field schema. - :rtype: Field - """ - field_schema = schema_from_json_data( - json_data=field_desc['type'], - names=names, - ) - other_props = ( - dict(filter_keys_out(items=field_desc, keys=FIELD_RESERVED_PROPS))) - return Field( - data_type=field_schema, - name=field_desc['name'], - index=index, - has_default=('default' in field_desc), - default=field_desc.get('default', _NO_DEFAULT), - order=field_desc.get('order', None), - doc=field_desc.get('doc', None), - other_props=other_props, - ) - - @staticmethod - def make_field_list(field_desc_list, names): - """Builds field schemas from a list of field JSON descriptors. - Guarantees field name unicity. - - :param Any field_desc_list: Collection of field JSON descriptors. - :param Any names: The names for this schema. - :returns: Field schemas. - :rtype: Field - """ - for index, field_desc in enumerate(field_desc_list): - yield RecordSchema._make_field(index, field_desc, names) - - @staticmethod - def _make_field_map(fields): - """Builds the field map. - Guarantees field name unicity. - - :param Any fields: Iterable of field schema. - :returns: A map of field schemas, indexed by name. - :rtype: Dict[Any, Any] - """ - field_map = {} - for field in fields: - if field.name in field_map: - raise SchemaParseException( - f'Duplicate record field name {field.name!r}.') - field_map[field.name] = field - return field_map - - def __init__( - self, - name, - namespace, - fields=None, - make_fields=None, - names=None, - record_type=RECORD, - doc=None, - other_props=None - ): - """Initializes a new record schema object. - - Args: - name: Name of the record (absolute or relative). - namespace: Optional namespace the record belongs to, if name is relative. - fields: collection of fields to add to this record. - Exactly one of fields or make_fields must be specified. - make_fields: function creating the fields that belong to the record. - The function signature is: make_fields(names) -> ordered field list. - Exactly one of fields or make_fields must be specified. - names: - record_type: Type of the record: one of RECORD, ERROR or REQUEST. - Protocol requests are not named. 
- doc: - other_props: - """ - if record_type == REQUEST: - # Protocol requests are not named: - super(RecordSchema, self).__init__( - data_type=REQUEST, - other_props=other_props, - ) - elif record_type in [RECORD, ERROR]: - # Register this record name in the tracker: - super(RecordSchema, self).__init__( - data_type=record_type, - name=name, - namespace=namespace, - names=names, - other_props=other_props, - ) - else: - raise SchemaParseException( - f'Invalid record type: {record_type!r}.') - - if record_type in [RECORD, ERROR]: - avro_name = names.get_name(name=name, namespace=namespace) - nested_names = names.new_with_default_namespace(namespace=avro_name.namespace) - elif record_type == REQUEST: - # Protocol request has no name: no need to change default namespace: - nested_names = names - - if fields is None: - fields = make_fields(names=nested_names) - else: - assert make_fields is None - self._fields = tuple(fields) - - self._field_map = RecordSchema._make_field_map(self._fields) - - self._props['fields'] = fields - if doc is not None: - self._props['doc'] = doc - - @property - def fields(self): - """Returns: the field schemas, as an ordered tuple.""" - return self._fields - - @property - def field_map(self): - """Returns: a read-only map of the field schemas index by field names.""" - return self._field_map - - def to_json(self, names=None): - if names is None: - names = Names() - # Request records don't have names - if self.type == REQUEST: - return [f.to_json(names) for f in self.fields] - - if self.fullname in names.names: - return self.name_ref(names) - names.names[self.fullname] = self - - to_dump = names.prune_namespace(self.props.copy()) - to_dump['fields'] = [f.to_json(names) for f in self.fields] - return to_dump - - def __eq__(self, that): - to_cmp = json.loads(str(self)) - return to_cmp == json.loads(str(that)) - - -# ------------------------------------------------------------------------------ -# Module functions - - -def filter_keys_out(items, keys): - """Filters a collection of (key, value) items. - Exclude any item whose key belongs to keys. - - :param Dict[Any, Any] items: Dictionary of items to filter the keys out of. - :param Dict[Any, Any] keys: Dictionary of keys to filter the extracted keys against. - :returns: Filtered items. 
- :rtype: Tuple(Any, Any) - """ - for key, value in items.items(): - if key in keys: - continue - yield key, value - - -# ------------------------------------------------------------------------------ - - -def _schema_from_json_string(json_string, names): - if json_string in PRIMITIVE_TYPES: - return PrimitiveSchema(data_type=json_string) - - # Look for a known named schema: - schema = names.get_schema(name=json_string) - if schema is None: - raise SchemaParseException(f"Unknown named schema {json_string!r}, known names: {sorted(names.names)!r}.") - return schema - - -def _schema_from_json_array(json_array, names): - def MakeSchema(desc): - return schema_from_json_data(json_data=desc, names=names) - - return UnionSchema(map(MakeSchema, json_array)) - - -def _schema_from_json_object(json_object, names): - data_type = json_object.get('type') - if data_type is None: - raise SchemaParseException( - f'Avro schema JSON descriptor has no "type" property: {json_object!r}') - - other_props = dict( - filter_keys_out(items=json_object, keys=SCHEMA_RESERVED_PROPS)) - - if data_type in PRIMITIVE_TYPES: - # FIXME should not ignore other properties - result = PrimitiveSchema(data_type, other_props=other_props) - - elif data_type in NAMED_TYPES: - name = json_object.get('name') - namespace = json_object.get('namespace', names.default_namespace) - if data_type == FIXED: - size = json_object.get('size') - result = FixedSchema(name, namespace, size, names, other_props) - elif data_type == ENUM: - symbols = json_object.get('symbols') - doc = json_object.get('doc') - result = EnumSchema(name, namespace, symbols, names, doc, other_props) - - elif data_type in [RECORD, ERROR]: - field_desc_list = json_object.get('fields', ()) - - def MakeFields(names): - return tuple(RecordSchema.make_field_list(field_desc_list, names)) - - result = RecordSchema( - name=name, - namespace=namespace, - make_fields=MakeFields, - names=names, - record_type=data_type, - doc=json_object.get('doc'), - other_props=other_props, - ) - else: - raise ValueError(f'Internal error: unknown type {data_type!r}.') - - elif data_type in VALID_TYPES: - # Unnamed, non-primitive Avro type: - - if data_type == ARRAY: - items_desc = json_object.get('items') - if items_desc is None: - raise SchemaParseException(f'Invalid array schema descriptor with no "items" : {json_object!r}.') - result = ArraySchema( - items=schema_from_json_data(items_desc, names), - other_props=other_props, - ) - - elif data_type == MAP: - values_desc = json_object.get('values') - if values_desc is None: - raise SchemaParseException(f'Invalid map schema descriptor with no "values" : {json_object!r}.') - result = MapSchema( - values=schema_from_json_data(values_desc, names=names), - other_props=other_props, - ) - - elif data_type == ERROR_UNION: - error_desc_list = json_object.get('declared_errors') - assert error_desc_list is not None - error_schemas = map( - lambda desc: schema_from_json_data(desc, names=names), - error_desc_list) - result = ErrorUnionSchema(schemas=error_schemas) - - else: - raise ValueError(f'Internal error: unknown type {data_type!r}.') - else: - raise SchemaParseException(f'Invalid JSON descriptor for an Avro schema: {json_object!r}') - return result - - -# Parsers for the JSON data types: -_JSONDataParserTypeMap = { - str: _schema_from_json_string, - list: _schema_from_json_array, - dict: _schema_from_json_object, -} - - -def schema_from_json_data(json_data, names=None): - """Builds an Avro Schema from its JSON descriptor. 
-    Raises SchemaParseException if the descriptor is invalid.
-
-    :param Any json_data: JSON data representing the descriptor of the Avro schema.
-    :param Any names: Optional tracker for Avro named schemas.
-    :returns: The Avro schema parsed from the JSON descriptor.
-    :rtype: Any
-    """
-    if names is None:
-        names = Names()
-
-    # Select the appropriate parser based on the JSON data type:
-    parser = _JSONDataParserTypeMap.get(type(json_data))
-    if parser is None:
-        raise SchemaParseException(
-            f'Invalid JSON descriptor for an Avro schema: {json_data!r}.')
-    return parser(json_data, names=names)
-
-
-# ------------------------------------------------------------------------------
-
-
-def parse(json_string):
-    """Constructs a Schema from its JSON descriptor in text form.
-    Raises SchemaParseException if a JSON parsing error is encountered, or if the JSON descriptor is invalid.
-
-    :param str json_string: String representation of the JSON descriptor of the schema.
-    :returns: The parsed schema.
-    :rtype: Any
-    """
-    try:
-        json_data = json.loads(json_string)
-    except Exception as exn:
-        raise SchemaParseException(
-            f'Error parsing schema from JSON: {json_string!r}. '
-            f'Error message: {exn!r}.') from exn
-
-    # Initialize the names object
-    names = Names()
-
-    # Construct the Avro Schema object
-    return schema_from_json_data(json_data, names)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py
deleted file mode 100644
index 28c58255677b..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
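
# [Editorial aside] A minimal usage sketch for the Avro schema parser deleted
# above. It assumes the module were still importable from its pre-PR path,
# azure.storage.blob._shared.avro.schema; the record definition is a
# hypothetical example, not something taken from this PR.
from azure.storage.blob._shared.avro.schema import parse

RECORD_JSON = """
{
  "type": "record",
  "name": "ChangeFeedEvent",
  "namespace": "com.example",
  "fields": [
    {"name": "id", "type": "string"},
    {"name": "count", "type": "int", "default": 0}
  ]
}
"""

schema = parse(RECORD_JSON)
print(schema.fullname)                  # com.example.ChangeFeedEvent
print([f.name for f in schema.fields])  # ['id', 'count']
print(schema.fields[1].has_default)     # True
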
-import logging
-import uuid
-from typing import (
-    Any,
-    cast,
-    Dict,
-    Iterator,
-    Optional,
-    Tuple,
-    TYPE_CHECKING,
-    Union,
-)
-from urllib.parse import parse_qs, quote
-
-from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential
-from azure.core.exceptions import HttpResponseError
-from azure.core.pipeline import Pipeline
-from azure.core.pipeline.transport import HttpTransport, RequestsTransport  # pylint: disable=non-abstract-transport-import, no-name-in-module
-from azure.core.pipeline.policies import (
-    AzureSasCredentialPolicy,
-    ContentDecodePolicy,
-    DistributedTracingPolicy,
-    HttpLoggingPolicy,
-    ProxyPolicy,
-    RedirectPolicy,
-    UserAgentPolicy,
-)
-
-from .authentication import SharedKeyCredentialPolicy
-from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE
-from .models import LocationMode, StorageConfiguration
-from .policies import (
-    ExponentialRetry,
-    QueueMessagePolicy,
-    StorageBearerTokenCredentialPolicy,
-    StorageContentValidation,
-    StorageHeadersPolicy,
-    StorageHosts,
-    StorageLoggingPolicy,
-    StorageRequestHook,
-    StorageResponseHook,
-)
-from .request_handlers import serialize_batch_body, _get_batch_request_delimiter
-from .response_handlers import PartialBatchErrorException, process_storage_error
-from .shared_access_signature import QueryStringConstants
-from .._version import VERSION
-from .._shared_access_signature import _is_credential_sastoken
-
-if TYPE_CHECKING:
-    from azure.core.credentials_async import AsyncTokenCredential
-    from azure.core.pipeline.transport import HttpRequest, HttpResponse  # pylint: disable=C4756
-
-_LOGGER = logging.getLogger(__name__)
-_SERVICE_PARAMS = {
-    "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"},
-    "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"},
-    "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"},
-    "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"},
-}
-
-
-class StorageAccountHostsMixin(object):  # pylint: disable=too-many-instance-attributes
-    _client: Any
-    def __init__(
-        self,
-        parsed_url: Any,
-        service: str,
-        credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None,  # pylint: disable=line-too-long
-        **kwargs: Any
-    ) -> None:
-        self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY)
-        self._hosts = kwargs.get("_hosts")
-        self.scheme = parsed_url.scheme
-        self._is_localhost = False
-
-        if service not in ["blob", "queue", "file-share", "dfs"]:
-            raise ValueError(f"Invalid service: {service}")
-        service_name = service.split('-')[0]
-        account = parsed_url.netloc.split(f".{service_name}.core.")
-
-        self.account_name = account[0] if len(account) > 1 else None
-        # Parenthesized so the `or` does not take precedence over the `and`:
-        if not self.account_name and (parsed_url.netloc.startswith("localhost")
-                or parsed_url.netloc.startswith("127.0.0.1")):
-            self._is_localhost = True
-            self.account_name = parsed_url.path.strip("/")
-
-        self.credential = _format_shared_key_credential(self.account_name, credential)
-        if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"):
-            raise ValueError("Token credential is only supported with HTTPS.")
-
-        secondary_hostname = None
-        if hasattr(self.credential, "account_name"):
-            self.account_name = self.credential.account_name
-            secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}"
-
-        if not self._hosts:
-            if len(account) > 1:
-                secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary")
-            if kwargs.get("secondary_hostname"):
-                secondary_hostname = kwargs["secondary_hostname"]
-            primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/')
-            self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname}
-
-        self._sdk_moniker = f"storage-{service}/{VERSION}"
-        self._config, self._pipeline = self._create_pipeline(self.credential, sdk_moniker=self._sdk_moniker, **kwargs)
-
-    def __enter__(self):
-        self._client.__enter__()
-        return self
-
-    def __exit__(self, *args):
-        self._client.__exit__(*args)
-
-    def close(self):
-        """This method closes the sockets opened by the client.
-        It need not be called when the client is used as a context manager.
-        """
-        self._client.close()
-
-    @property
-    def url(self):
-        """The full endpoint URL to this entity, including SAS token if used.
-
-        This could be either the primary endpoint or the secondary endpoint,
-        depending on the current :func:`location_mode`.
-        :returns: The full endpoint URL to this entity, including SAS token if used.
-        :rtype: str
-        """
-        return self._format_url(self._hosts[self._location_mode])
-
-    @property
-    def primary_endpoint(self):
-        """The full primary endpoint URL.
-
-        :rtype: str
-        """
-        return self._format_url(self._hosts[LocationMode.PRIMARY])
-
-    @property
-    def primary_hostname(self):
-        """The hostname of the primary endpoint.
-
-        :rtype: str
-        """
-        return self._hosts[LocationMode.PRIMARY]
-
-    @property
-    def secondary_endpoint(self):
-        """The full secondary endpoint URL if configured.
-
-        If not available, a ValueError will be raised. To explicitly specify a secondary hostname, use the optional
-        `secondary_hostname` keyword argument on instantiation.
-
-        :rtype: str
-        :raise ValueError: If no secondary host is configured.
-        """
-        if not self._hosts[LocationMode.SECONDARY]:
-            raise ValueError("No secondary host configured.")
-        return self._format_url(self._hosts[LocationMode.SECONDARY])
-
-    @property
-    def secondary_hostname(self):
-        """The hostname of the secondary endpoint.
-
-        If not available, this will be None. To explicitly specify a secondary hostname, use the optional
-        `secondary_hostname` keyword argument on instantiation.
-
-        :rtype: Optional[str]
-        """
-        return self._hosts[LocationMode.SECONDARY]
-
-    @property
-    def location_mode(self):
-        """The location mode that the client is currently using.
-
-        By default this will be "primary". Options include "primary" and "secondary".
-
-        :rtype: str
-        """
-
-        return self._location_mode
-
-    @location_mode.setter
-    def location_mode(self, value):
-        if self._hosts.get(value):
-            self._location_mode = value
-            self._client._config.url = self.url  # pylint: disable=protected-access
-        else:
-            raise ValueError(f"No host URL for location mode: {value}")
-
-    @property
-    def api_version(self):
-        """The version of the Storage API used for requests.
- - :rtype: str - """ - return self._client._config.version # pylint: disable=protected-access - - def _format_query_string( - self, sas_token: Optional[str], - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]], # pylint: disable=line-too-long - snapshot: Optional[str] = None, - share_snapshot: Optional[str] = None - ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]]]: # pylint: disable=line-too-long - query_str = "?" - if snapshot: - query_str += f"snapshot={snapshot}&" - if share_snapshot: - query_str += f"sharesnapshot={share_snapshot}&" - if sas_token and isinstance(credential, AzureSasCredential): - raise ValueError( - "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") - if _is_credential_sastoken(credential): - credential = cast(str, credential) - query_str += credential.lstrip("?") - credential = None - elif sas_token: - query_str += sas_token - return query_str.rstrip("?&"), credential - - def _create_pipeline( - self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> Tuple[StorageConfiguration, Pipeline]: - self._credential_policy: Any = None - if hasattr(credential, "get_token"): - if kwargs.get('audience'): - audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE - else: - audience = STORAGE_OAUTH_SCOPE - self._credential_policy = StorageBearerTokenCredentialPolicy(cast(TokenCredential, credential), audience) - elif isinstance(credential, SharedKeyCredentialPolicy): - self._credential_policy = credential - elif isinstance(credential, AzureSasCredential): - self._credential_policy = AzureSasCredentialPolicy(credential) - elif credential is not None: - raise TypeError(f"Unsupported credential: {type(credential)}") - - config = kwargs.get("_configuration") or create_configuration(**kwargs) - if kwargs.get("_pipeline"): - return config, kwargs["_pipeline"] - transport = kwargs.get("transport") - kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) - kwargs.setdefault("read_timeout", READ_TIMEOUT) - if not transport: - transport = RequestsTransport(**kwargs) - policies = [ - QueueMessagePolicy(), - config.proxy_policy, - config.user_agent_policy, - StorageContentValidation(), - ContentDecodePolicy(response_encoding="utf-8"), - RedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), - config.retry_policy, - config.headers_policy, - StorageRequestHook(**kwargs), - self._credential_policy, - config.logging_policy, - StorageResponseHook(**kwargs), - DistributedTracingPolicy(**kwargs), - HttpLoggingPolicy(**kwargs) - ] - if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") # type: ignore - config.transport = transport # type: ignore - return config, Pipeline(transport, policies=policies) - - def _batch_send( - self, - *reqs: "HttpRequest", - **kwargs: Any - ) -> Iterator["HttpResponse"]: - """Given a series of request, do a Storage batch call. - - :param HttpRequest reqs: A collection of HttpRequest objects. - :returns: An iterator of HttpResponse objects. 
- :rtype: Iterator[HttpResponse] - """ - # Pop it here, so requests doesn't feel bad about additional kwarg - raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) - batch_id = str(uuid.uuid1()) - - request = self._client._client.post( # pylint: disable=protected-access - url=( - f'{self.scheme}://{self.primary_hostname}/' - f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" - f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" - ), - headers={ - 'x-ms-version': self.api_version, - "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False) - } - ) - - policies = [StorageHeadersPolicy()] - if self._credential_policy: - policies.append(self._credential_policy) - - request.set_multipart_mixed( - *reqs, - policies=policies, - enforce_https=False - ) - - Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access - body = serialize_batch_body(request.multipart_mixed_info[0], batch_id) - request.set_bytes_body(body) - - temp = request.multipart_mixed_info - request.multipart_mixed_info = None - pipeline_response = self._pipeline.run( - request, **kwargs - ) - response = pipeline_response.http_response - request.multipart_mixed_info = temp - - try: - if response.status_code not in [202]: - raise HttpResponseError(response=response) - parts = response.parts() - if raise_on_any_failure: - parts = list(response.parts()) - if any(p for p in parts if not 200 <= p.status_code < 300): - error = PartialBatchErrorException( - message="There is a partial failure in the batch operation.", - response=response, parts=parts - ) - raise error - return iter(parts) - return parts # type: ignore [no-any-return] - except HttpResponseError as error: - process_storage_error(error) - - -class TransportWrapper(HttpTransport): - """Wrapper class that ensures that an inner client created - by a `get_client` method does not close the outer transport for the parent - when used in a context manager. 
- """ - def __init__(self, transport): - self._transport = transport - - def send(self, request, **kwargs): - return self._transport.send(request, **kwargs) - - def open(self): - pass - - def close(self): - pass - - def __enter__(self): - pass - - def __exit__(self, *args): # pylint: disable=arguments-differ - pass - - -def _format_shared_key_credential( - account_name: str, - credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None, # pylint: disable=line-too-long -) -> Any: - if isinstance(credential, str): - if not account_name: - raise ValueError("Unable to determine account name for shared key credential.") - credential = {"account_name": account_name, "account_key": credential} - if isinstance(credential, dict): - if "account_name" not in credential: - raise ValueError("Shared key credential missing 'account_name") - if "account_key" not in credential: - raise ValueError("Shared key credential missing 'account_key") - return SharedKeyCredentialPolicy(**credential) - if isinstance(credential, AzureNamedKeyCredential): - return SharedKeyCredentialPolicy(credential.named_key.name, credential.named_key.key) - return credential - - -def parse_connection_str( - conn_str: str, - credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]], # pylint: disable=line-too-long - service: str -) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]]]: # pylint: disable=line-too-long - conn_str = conn_str.rstrip(";") - conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] - if any(len(tup) != 2 for tup in conn_settings_list): - raise ValueError("Connection string is either blank or malformed.") - conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) - endpoints = _SERVICE_PARAMS[service] - primary = None - secondary = None - if not credential: - try: - credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} - except KeyError: - credential = conn_settings.get("SHAREDACCESSSIGNATURE") - if endpoints["primary"] in conn_settings: - primary = conn_settings[endpoints["primary"]] - if endpoints["secondary"] in conn_settings: - secondary = conn_settings[endpoints["secondary"]] - else: - if endpoints["secondary"] in conn_settings: - raise ValueError("Connection string specifies only secondary endpoint.") - try: - primary =( - f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://" - f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}" - ) - secondary = ( - f"{conn_settings['ACCOUNTNAME']}-secondary." - f"{service}.{conn_settings['ENDPOINTSUFFIX']}" - ) - except KeyError: - pass - - if not primary: - try: - primary = ( - f"https://{conn_settings['ACCOUNTNAME']}." 
- f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}" - ) - except KeyError as exc: - raise ValueError("Connection string missing required connection details.") from exc - if service == "dfs": - primary = primary.replace(".blob.", ".dfs.") - if secondary: - secondary = secondary.replace(".blob.", ".dfs.") - return primary, secondary, credential - - -def create_configuration(**kwargs: Any) -> StorageConfiguration: - # Backwards compatibility if someone is not passing sdk_moniker - if not kwargs.get("sdk_moniker"): - kwargs["sdk_moniker"] = f"storage-{kwargs.pop('storage_sdk')}/{VERSION}" - config = StorageConfiguration(**kwargs) - config.headers_policy = StorageHeadersPolicy(**kwargs) - config.user_agent_policy = UserAgentPolicy(**kwargs) - config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) - config.logging_policy = StorageLoggingPolicy(**kwargs) - config.proxy_policy = ProxyPolicy(**kwargs) - return config - - -def parse_query(query_str: str) -> Tuple[Optional[str], Optional[str]]: - sas_values = QueryStringConstants.to_list() - parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} - sas_params = [f"{k}={quote(v, safe='')}" for k, v in parsed_query.items() if k in sas_values] - sas_token = None - if sas_params: - sas_token = "&".join(sas_params) - - snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") - return snapshot, sas_token diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py deleted file mode 100644 index 98723edafbc3..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py +++ /dev/null @@ -1,280 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# mypy: disable-error-code="attr-defined" - -import logging -from typing import Any, cast, Dict, Optional, Tuple, TYPE_CHECKING, Union - -from azure.core.async_paging import AsyncList -from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential -from azure.core.credentials_async import AsyncTokenCredential -from azure.core.exceptions import HttpResponseError -from azure.core.pipeline import AsyncPipeline -from azure.core.pipeline.policies import ( - AsyncRedirectPolicy, - AzureSasCredentialPolicy, - ContentDecodePolicy, - DistributedTracingPolicy, - HttpLoggingPolicy, -) -from azure.core.pipeline.transport import AsyncHttpTransport - -from .authentication import SharedKeyCredentialPolicy -from .base_client import create_configuration -from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE -from .models import StorageConfiguration -from .policies import ( - QueueMessagePolicy, - StorageContentValidation, - StorageHeadersPolicy, - StorageHosts, - StorageRequestHook, -) -from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook -from .response_handlers import PartialBatchErrorException, process_storage_error -from .._shared_access_signature import _is_credential_sastoken - -if TYPE_CHECKING: - from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 -_LOGGER = logging.getLogger(__name__) - -_SERVICE_PARAMS = { - "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, - "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, - "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, - "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, -} - - -class AsyncStorageAccountHostsMixin(object): - - def __enter__(self): - raise TypeError("Async client only supports 'async with'.") - - def __exit__(self, *args): - pass - - async def __aenter__(self): - await self._client.__aenter__() - return self - - async def __aexit__(self, *args): - await self._client.__aexit__(*args) - - async def close(self): - """ This method is to close the sockets opened by the client. - It need not be used when using with a context manager. - """ - await self._client.close() - - def _format_query_string( - self, sas_token: Optional[str], - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]], # pylint: disable=line-too-long - snapshot: Optional[str] = None, - share_snapshot: Optional[str] = None - ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]]]: # pylint: disable=line-too-long - query_str = "?" 
- if snapshot: - query_str += f"snapshot={snapshot}&" - if share_snapshot: - query_str += f"sharesnapshot={share_snapshot}&" - if sas_token and isinstance(credential, AzureSasCredential): - raise ValueError( - "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") - if _is_credential_sastoken(credential): - query_str += credential.lstrip("?") # type: ignore [union-attr] - credential = None - elif sas_token: - query_str += sas_token - return query_str.rstrip("?&"), credential - - def _create_pipeline( - self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> Tuple[StorageConfiguration, AsyncPipeline]: - self._credential_policy: Optional[ - Union[AsyncStorageBearerTokenCredentialPolicy, - SharedKeyCredentialPolicy, - AzureSasCredentialPolicy]] = None - if hasattr(credential, 'get_token'): - if kwargs.get('audience'): - audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE - else: - audience = STORAGE_OAUTH_SCOPE - self._credential_policy = AsyncStorageBearerTokenCredentialPolicy( - cast(AsyncTokenCredential, credential), audience) - elif isinstance(credential, SharedKeyCredentialPolicy): - self._credential_policy = credential - elif isinstance(credential, AzureSasCredential): - self._credential_policy = AzureSasCredentialPolicy(credential) - elif credential is not None: - raise TypeError(f"Unsupported credential: {type(credential)}") - config = kwargs.get('_configuration') or create_configuration(**kwargs) - if kwargs.get('_pipeline'): - return config, kwargs['_pipeline'] - transport = kwargs.get('transport') - kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) - kwargs.setdefault("read_timeout", READ_TIMEOUT) - if not transport: - try: - from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import - except ImportError as exc: - raise ImportError("Unable to create async transport. Please check aiohttp is installed.") from exc - transport = AioHttpTransport(**kwargs) - hosts = self._hosts - policies = [ - QueueMessagePolicy(), - config.headers_policy, - config.proxy_policy, - config.user_agent_policy, - StorageContentValidation(), - StorageRequestHook(**kwargs), - self._credential_policy, - ContentDecodePolicy(response_encoding="utf-8"), - AsyncRedirectPolicy(**kwargs), - StorageHosts(hosts=hosts, **kwargs), - config.retry_policy, - config.logging_policy, - AsyncStorageResponseHook(**kwargs), - DistributedTracingPolicy(**kwargs), - HttpLoggingPolicy(**kwargs), - ] - if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") #type: ignore - config.transport = transport #type: ignore - return config, AsyncPipeline(transport, policies=policies) #type: ignore - - async def _batch_send( - self, - *reqs: "HttpRequest", - **kwargs: Any - ) -> AsyncList["HttpResponse"]: - """Given a series of request, do a Storage batch call. - - :param HttpRequest reqs: A collection of HttpRequest objects. - :returns: An AsyncList of HttpResponse objects. 
- :rtype: AsyncList[HttpResponse] - """ - # Pop it here, so requests doesn't feel bad about additional kwarg - raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) - request = self._client._client.post( # pylint: disable=protected-access - url=( - f'{self.scheme}://{self.primary_hostname}/' - f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" - f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" - ), - headers={ - 'x-ms-version': self.api_version - } - ) - - policies = [StorageHeadersPolicy()] - if self._credential_policy: - policies.append(self._credential_policy) # type: ignore - - request.set_multipart_mixed( - *reqs, - policies=policies, - enforce_https=False - ) - - pipeline_response = await self._pipeline.run( - request, **kwargs - ) - response = pipeline_response.http_response - - try: - if response.status_code not in [202]: - raise HttpResponseError(response=response) - parts = response.parts() # Return an AsyncIterator - if raise_on_any_failure: - parts_list = [] - async for part in parts: - parts_list.append(part) - if any(p for p in parts_list if not 200 <= p.status_code < 300): - error = PartialBatchErrorException( - message="There is a partial failure in the batch operation.", - response=response, parts=parts_list - ) - raise error - return AsyncList(parts_list) - return parts # type: ignore [no-any-return] - except HttpResponseError as error: - process_storage_error(error) - -def parse_connection_str( - conn_str: str, - credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]], # pylint: disable=line-too-long - service: str -) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]]]: # pylint: disable=line-too-long - conn_str = conn_str.rstrip(";") - conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] - if any(len(tup) != 2 for tup in conn_settings_list): - raise ValueError("Connection string is either blank or malformed.") - conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) - endpoints = _SERVICE_PARAMS[service] - primary = None - secondary = None - if not credential: - try: - credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} - except KeyError: - credential = conn_settings.get("SHAREDACCESSSIGNATURE") - if endpoints["primary"] in conn_settings: - primary = conn_settings[endpoints["primary"]] - if endpoints["secondary"] in conn_settings: - secondary = conn_settings[endpoints["secondary"]] - else: - if endpoints["secondary"] in conn_settings: - raise ValueError("Connection string specifies only secondary endpoint.") - try: - primary =( - f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://" - f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}" - ) - secondary = ( - f"{conn_settings['ACCOUNTNAME']}-secondary." - f"{service}.{conn_settings['ENDPOINTSUFFIX']}" - ) - except KeyError: - pass - - if not primary: - try: - primary = ( - f"https://{conn_settings['ACCOUNTNAME']}." 
- f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}" - ) - except KeyError as exc: - raise ValueError("Connection string missing required connection details.") from exc - if service == "dfs": - primary = primary.replace(".blob.", ".dfs.") - if secondary: - secondary = secondary.replace(".blob.", ".dfs.") - return primary, secondary, credential - -class AsyncTransportWrapper(AsyncHttpTransport): - """Wrapper class that ensures that an inner client created - by a `get_client` method does not close the outer transport for the parent - when used in a context manager. - """ - def __init__(self, async_transport): - self._transport = async_transport - - async def send(self, request, **kwargs): - return await self._transport.send(request, **kwargs) - - async def open(self): - pass - - async def close(self): - pass - - async def __aenter__(self): - pass - - async def __aexit__(self, *args): # pylint: disable=arguments-differ - pass diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/constants.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/constants.py deleted file mode 100644 index 0b4b029a2d1b..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/constants.py +++ /dev/null @@ -1,19 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from .._serialize import _SUPPORTED_API_VERSIONS - - -X_MS_VERSION = _SUPPORTED_API_VERSIONS[-1] - -# Default socket timeouts, in seconds -CONNECTION_TIMEOUT = 20 -READ_TIMEOUT = 60 - -DEFAULT_OAUTH_SCOPE = "/.default" -STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" - -SERVICE_HOST_BASE = 'core.windows.net' diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py deleted file mode 100644 index 33a70a72fd7a..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py +++ /dev/null @@ -1,582 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# pylint: disable=too-many-instance-attributes -from enum import Enum -from typing import Optional - -from azure.core import CaseInsensitiveEnumMeta -from azure.core.configuration import Configuration -from azure.core.pipeline.policies import UserAgentPolicy - - -def get_enum_value(value): - if value is None or value in ["None", ""]: - return None - try: - return value.value - except AttributeError: - return value - - -class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - - # Generic storage values - ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" - ACCOUNT_BEING_CREATED = "AccountBeingCreated" - ACCOUNT_IS_DISABLED = "AccountIsDisabled" - AUTHENTICATION_FAILED = "AuthenticationFailed" - AUTHORIZATION_FAILURE = "AuthorizationFailure" - NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" - CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" - CONDITION_NOT_MET = "ConditionNotMet" - EMPTY_METADATA_KEY = "EmptyMetadataKey" - INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" - INTERNAL_ERROR = "InternalError" - INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" - INVALID_HEADER_VALUE = "InvalidHeaderValue" - INVALID_HTTP_VERB = "InvalidHttpVerb" - INVALID_INPUT = "InvalidInput" - INVALID_MD5 = "InvalidMd5" - INVALID_METADATA = "InvalidMetadata" - INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" - INVALID_RANGE = "InvalidRange" - INVALID_RESOURCE_NAME = "InvalidResourceName" - INVALID_URI = "InvalidUri" - INVALID_XML_DOCUMENT = "InvalidXmlDocument" - INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" - MD5_MISMATCH = "Md5Mismatch" - METADATA_TOO_LARGE = "MetadataTooLarge" - MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" - MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" - MISSING_REQUIRED_HEADER = "MissingRequiredHeader" - MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" - MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" - OPERATION_TIMED_OUT = "OperationTimedOut" - OUT_OF_RANGE_INPUT = "OutOfRangeInput" - OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" - REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" - RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" - REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" - RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" - RESOURCE_NOT_FOUND = "ResourceNotFound" - SERVER_BUSY = "ServerBusy" - UNSUPPORTED_HEADER = "UnsupportedHeader" - UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" - UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" - UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" - - # Blob values - APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" - BLOB_ALREADY_EXISTS = "BlobAlreadyExists" - BLOB_NOT_FOUND = "BlobNotFound" - BLOB_OVERWRITTEN = "BlobOverwritten" - BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" - BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" - BLOCK_LIST_TOO_LONG = "BlockListTooLong" - CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" - CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" - CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" - CONTAINER_BEING_DELETED = "ContainerBeingDeleted" - CONTAINER_DISABLED = "ContainerDisabled" - CONTAINER_NOT_FOUND = "ContainerNotFound" - CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" - COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" - 
COPY_ID_MISMATCH = "CopyIdMismatch" - FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" - INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" - INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" - #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. - INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" - INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" - INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" - INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" - INVALID_BLOB_TIER = "InvalidBlobTier" - INVALID_BLOB_TYPE = "InvalidBlobType" - INVALID_BLOCK_ID = "InvalidBlockId" - INVALID_BLOCK_LIST = "InvalidBlockList" - INVALID_OPERATION = "InvalidOperation" - INVALID_PAGE_RANGE = "InvalidPageRange" - INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" - INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" - INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" - LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" - LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" - LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" - LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" - LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" - LEASE_ID_MISSING = "LeaseIdMissing" - LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" - LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" - LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" - LEASE_LOST = "LeaseLost" - LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" - LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" - LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" - MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" - NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" - OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" - PENDING_COPY_OPERATION = "PendingCopyOperation" - PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" - PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" - PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" - SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" - SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" - SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" - SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" - #: Deprecated: Please use SNAPSHOT_OPERATION_RATE_EXCEEDED instead. 
- SNAPHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" - SNAPSHOTS_PRESENT = "SnapshotsPresent" - SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" - SYSTEM_IN_USE = "SystemInUse" - TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" - UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" - BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" - BLOB_ARCHIVED = "BlobArchived" - BLOB_NOT_ARCHIVED = "BlobNotArchived" - - # Queue values - INVALID_MARKER = "InvalidMarker" - MESSAGE_NOT_FOUND = "MessageNotFound" - MESSAGE_TOO_LARGE = "MessageTooLarge" - POP_RECEIPT_MISMATCH = "PopReceiptMismatch" - QUEUE_ALREADY_EXISTS = "QueueAlreadyExists" - QUEUE_BEING_DELETED = "QueueBeingDeleted" - QUEUE_DISABLED = "QueueDisabled" - QUEUE_NOT_EMPTY = "QueueNotEmpty" - QUEUE_NOT_FOUND = "QueueNotFound" - - # File values - CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" - CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" - DELETE_PENDING = "DeletePending" - DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" - FILE_LOCK_CONFLICT = "FileLockConflict" - INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" - PARENT_NOT_FOUND = "ParentNotFound" - READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" - SHARE_ALREADY_EXISTS = "ShareAlreadyExists" - SHARE_BEING_DELETED = "ShareBeingDeleted" - SHARE_DISABLED = "ShareDisabled" - SHARE_NOT_FOUND = "ShareNotFound" - SHARING_VIOLATION = "SharingViolation" - SHARE_SNAPSHOT_IN_PROGRESS = "ShareSnapshotInProgress" - SHARE_SNAPSHOT_COUNT_EXCEEDED = "ShareSnapshotCountExceeded" - SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" - SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" - CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" - - # DataLake values - CONTENT_LENGTH_MUST_BE_ZERO = 'ContentLengthMustBeZero' - PATH_ALREADY_EXISTS = 'PathAlreadyExists' - INVALID_FLUSH_POSITION = 'InvalidFlushPosition' - INVALID_PROPERTY_NAME = 'InvalidPropertyName' - INVALID_SOURCE_URI = 'InvalidSourceUri' - UNSUPPORTED_REST_VERSION = 'UnsupportedRestVersion' - FILE_SYSTEM_NOT_FOUND = 'FilesystemNotFound' - PATH_NOT_FOUND = 'PathNotFound' - RENAME_DESTINATION_PARENT_PATH_NOT_FOUND = 'RenameDestinationParentPathNotFound' - SOURCE_PATH_NOT_FOUND = 'SourcePathNotFound' - DESTINATION_PATH_IS_BEING_DELETED = 'DestinationPathIsBeingDeleted' - FILE_SYSTEM_ALREADY_EXISTS = 'FilesystemAlreadyExists' - FILE_SYSTEM_BEING_DELETED = 'FilesystemBeingDeleted' - INVALID_DESTINATION_PATH = 'InvalidDestinationPath' - INVALID_RENAME_SOURCE_PATH = 'InvalidRenameSourcePath' - INVALID_SOURCE_OR_DESTINATION_RESOURCE_TYPE = 'InvalidSourceOrDestinationResourceType' - LEASE_IS_ALREADY_BROKEN = 'LeaseIsAlreadyBroken' - LEASE_NAME_MISMATCH = 'LeaseNameMismatch' - PATH_CONFLICT = 'PathConflict' - SOURCE_PATH_IS_BEING_DELETED = 'SourcePathIsBeingDeleted' - - -class DictMixin(object): - - def __setitem__(self, key, item): - self.__dict__[key] = item - - def __getitem__(self, key): - return self.__dict__[key] - - def __repr__(self): - return str(self) - - def __len__(self): - return len(self.keys()) - - def __delitem__(self, key): - self.__dict__[key] = None - - # Compare objects by comparing all attributes. - def __eq__(self, other): - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False - - # Compare objects by comparing all attributes. 
- def __ne__(self, other): - return not self.__eq__(other) - - def __str__(self): - return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')}) - - def __contains__(self, key): - return key in self.__dict__ - - def has_key(self, k): - return k in self.__dict__ - - def update(self, *args, **kwargs): - return self.__dict__.update(*args, **kwargs) - - def keys(self): - return [k for k in self.__dict__ if not k.startswith('_')] - - def values(self): - return [v for k, v in self.__dict__.items() if not k.startswith('_')] - - def items(self): - return [(k, v) for k, v in self.__dict__.items() if not k.startswith('_')] - - def get(self, key, default=None): - if key in self.__dict__: - return self.__dict__[key] - return default - - -class LocationMode(object): - """ - Specifies the location the request should be sent to. This mode only applies - for RA-GRS accounts which allow secondary read access. All other account types - must use PRIMARY. - """ - - PRIMARY = 'primary' #: Requests should be sent to the primary location. - SECONDARY = 'secondary' #: Requests should be sent to the secondary location, if possible. - - -class ResourceTypes(object): - """ - Specifies the resource types that are accessible with the account SAS. - - :param bool service: - Access to service-level APIs (e.g., Get/Set Service Properties, - Get Service Stats, List Containers/Queues/Shares) - :param bool container: - Access to container-level APIs (e.g., Create/Delete Container, - Create/Delete Queue, Create/Delete Share, - List Blobs/Files and Directories) - :param bool object: - Access to object-level APIs for blobs, queue messages, and - files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.) - """ - - service: bool = False - container: bool = False - object: bool = False - _str: str - - def __init__( - self, - service: bool = False, - container: bool = False, - object: bool = False # pylint: disable=redefined-builtin - ) -> None: - self.service = service - self.container = container - self.object = object - self._str = (('s' if self.service else '') + - ('c' if self.container else '') + - ('o' if self.object else '')) - - def __str__(self): - return self._str - - @classmethod - def from_string(cls, string): - """Create a ResourceTypes from a string. - - To specify service, container, or object you need only to - include the first letter of the word in the string. E.g. service and container, - you would provide a string "sc". - - :param str string: Specify service, container, or object in - in the string with the first letter of the word. - :return: A ResourceTypes object - :rtype: ~azure.storage.blob.ResourceTypes - """ - res_service = 's' in string - res_container = 'c' in string - res_object = 'o' in string - - parsed = cls(res_service, res_container, res_object) - parsed._str = string # pylint: disable = protected-access - return parsed - - -class AccountSasPermissions(object): - """ - :class:`~ResourceTypes` class to be used with generate_account_sas - function and for the AccessPolicies used with set_*_acl. There are two types of - SAS which may be used to grant resource access. One is to grant access to a - specific resource (resource-specific). Another is to grant access to the - entire service for a specific account and allow certain operations based on - perms found here. - - :param bool read: - Valid for all signed resources types (Service, Container, and Object). - Permits read permissions to the specified resource type. 
- :param bool write: - Valid for all signed resources types (Service, Container, and Object). - Permits write permissions to the specified resource type. - :param bool delete: - Valid for Container and Object resource types, except for queue messages. - :param bool delete_previous_version: - Delete the previous blob version for the versioning enabled storage account. - :param bool list: - Valid for Service and Container resource types only. - :param bool add: - Valid for the following Object resource types only: queue messages, and append blobs. - :param bool create: - Valid for the following Object resource types only: blobs and files. - Users can create new blobs or files, but may not overwrite existing - blobs or files. - :param bool update: - Valid for the following Object resource types only: queue messages. - :param bool process: - Valid for the following Object resource type only: queue messages. - :keyword bool tag: - To enable set or get tags on the blobs in the container. - :keyword bool filter_by_tags: - To enable get blobs by tags, this should be used together with list permission. - :keyword bool set_immutability_policy: - To enable operations related to set/delete immutability policy. - To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - Valid for Object resource type of Blob only. - """ - - read: bool = False - write: bool = False - delete: bool = False - delete_previous_version: bool = False - list: bool = False - add: bool = False - create: bool = False - update: bool = False - process: bool = False - tag: bool = False - filter_by_tags: bool = False - set_immutability_policy: bool = False - permanent_delete: bool = False - - def __init__( - self, - read: bool = False, - write: bool = False, - delete: bool = False, - list: bool = False, # pylint: disable=redefined-builtin - add: bool = False, - create: bool = False, - update: bool = False, - process: bool = False, - delete_previous_version: bool = False, - **kwargs - ) -> None: - self.read = read - self.write = write - self.delete = delete - self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) - self.list = list - self.add = add - self.create = create - self.update = update - self.process = process - self.tag = kwargs.pop('tag', False) - self.filter_by_tags = kwargs.pop('filter_by_tags', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) - self._str = (('r' if self.read else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + - ('l' if self.list else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('u' if self.update else '') + - ('p' if self.process else '') + - ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + - ('i' if self.set_immutability_policy else '') - ) - - def __str__(self): - return self._str - - @classmethod - def from_string(cls, permission): - """Create AccountSasPermissions from a string. - - To specify read, write, delete, etc. permissions you need only to - include the first letter of the word in the string. E.g. for read and write - permissions you would provide a string "rw". - - :param str permission: Specify permissions in - the string with the first letter of the word. 
-        :return: An AccountSasPermissions object
-        :rtype: ~azure.storage.blob.AccountSasPermissions
-        """
-        p_read = 'r' in permission
-        p_write = 'w' in permission
-        p_delete = 'd' in permission
-        p_delete_previous_version = 'x' in permission
-        p_permanent_delete = 'y' in permission
-        p_list = 'l' in permission
-        p_add = 'a' in permission
-        p_create = 'c' in permission
-        p_update = 'u' in permission
-        p_process = 'p' in permission
-        p_tag = 't' in permission
-        p_filter_by_tags = 'f' in permission
-        p_set_immutability_policy = 'i' in permission
-        parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version,
-                     list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag,
-                     filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy,
-                     permanent_delete=p_permanent_delete)
-
-        return parsed
-
-
-class Services(object):
-    """Specifies the services accessible with the account SAS.
-
-    :keyword bool blob:
-        Access for the `~azure.storage.blob.BlobServiceClient`. Default is False.
-    :keyword bool queue:
-        Access for the `~azure.storage.queue.QueueServiceClient`. Default is False.
-    :keyword bool fileshare:
-        Access for the `~azure.storage.fileshare.ShareServiceClient`. Default is False.
-    """
-
-    def __init__(
-        self, *,
-        blob: bool = False,
-        queue: bool = False,
-        fileshare: bool = False
-    ) -> None:
-        self.blob = blob
-        self.queue = queue
-        self.fileshare = fileshare
-        self._str = (('b' if self.blob else '') +
-                     ('q' if self.queue else '') +
-                     ('f' if self.fileshare else ''))
-
-    def __str__(self):
-        return self._str
-
-    @classmethod
-    def from_string(cls, string):
-        """Create Services from a string.
-
-        To specify blob, queue, or file you need only to
-        include the first letter of the word in the string. E.g. for blob and queue
-        you would provide a string "bq".
-
-        :param str string: Specify blob, queue, or file in
-            the string with the first letter of the word.
-        :return: A Services object
-        :rtype: ~azure.storage.blob.Services
-        """
-        res_blob = 'b' in string
-        res_queue = 'q' in string
-        res_file = 'f' in string
-
-        parsed = cls(blob=res_blob, queue=res_queue, fileshare=res_file)
-        parsed._str = string  # pylint: disable = protected-access
-        return parsed
-
-
-class UserDelegationKey(object):
-    """
-    Represents a user delegation key, provided to the user by Azure Storage
-    based on their Azure Active Directory access token.
-
-    The fields are saved as simple strings since the user does not have to interact with this object;
-    to generate an identity SAS, the user can simply pass it to the right API.
-    """
-
-    signed_oid: Optional[str] = None
-    """Object ID of this token."""
-    signed_tid: Optional[str] = None
-    """Tenant ID of the tenant that issued this token."""
-    signed_start: Optional[str] = None
-    """The datetime this token becomes valid."""
-    signed_expiry: Optional[str] = None
-    """The datetime this token expires."""
-    signed_service: Optional[str] = None
-    """What service this key is valid for."""
-    signed_version: Optional[str] = None
-    """The version identifier of the REST service that created this token."""
-    value: Optional[str] = None
-    """The user delegation key."""
-
-    def __init__(self):
-        self.signed_oid = None
-        self.signed_tid = None
-        self.signed_start = None
-        self.signed_expiry = None
-        self.signed_service = None
-        self.signed_version = None
-        self.value = None
-
-
-class StorageConfiguration(Configuration):
-    """
-    Specifies the configurable values used in Azure Storage.
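-    For example, with the defaults documented below, a 100 MiB block blob exceeds
-    max_single_put_size (64 MiB) and is therefore staged in max_block_size (4 MiB)
-    chunks; a rough sketch of the arithmetic::
-
-        import math
-
-        blob_size = 100 * 1024 * 1024
-        max_block_size = 4 * 1024 * 1024
-        blocks = math.ceil(blob_size / max_block_size)  # 25 staged blocks
-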
- - :param int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be - uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, - the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. - :param int copy_polling_interval: The interval in seconds for polling copy operations. - :param int max_block_size: The maximum chunk size for uploading a block blob in chunks. - Defaults to 4*1024*1024, or 4MB. - :param int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient - algorithm when uploading a block blob. - :param bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. - :param int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. - :param int min_large_chunk_upload_threshold: The max size for a single put operation. - :param int max_single_get_size: The maximum size for a blob to be downloaded in a single call, - the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. - :param int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, - or 4MB. - :param int max_range_size: The max range size for file upload. - - """ - - max_single_put_size: int - copy_polling_interval: int - max_block_size: int - min_large_block_upload_threshold: int - use_byte_buffer: bool - max_page_size: int - min_large_chunk_upload_threshold: int - max_single_get_size: int - max_chunk_get_size: int - max_range_size: int - user_agent_policy: UserAgentPolicy - - def __init__(self, **kwargs): - super(StorageConfiguration, self).__init__(**kwargs) - self.max_single_put_size = kwargs.pop('max_single_put_size', 64 * 1024 * 1024) - self.copy_polling_interval = 15 - self.max_block_size = kwargs.pop('max_block_size', 4 * 1024 * 1024) - self.min_large_block_upload_threshold = kwargs.get('min_large_block_upload_threshold', 4 * 1024 * 1024 + 1) - self.use_byte_buffer = kwargs.pop('use_byte_buffer', False) - self.max_page_size = kwargs.pop('max_page_size', 4 * 1024 * 1024) - self.min_large_chunk_upload_threshold = kwargs.pop('min_large_chunk_upload_threshold', 100 * 1024 * 1024 + 1) - self.max_single_get_size = kwargs.pop('max_single_get_size', 32 * 1024 * 1024) - self.max_chunk_get_size = kwargs.pop('max_chunk_get_size', 4 * 1024 * 1024) - self.max_range_size = kwargs.pop('max_range_size', 4 * 1024 * 1024) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/parser.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/parser.py deleted file mode 100644 index cd59cfe104ca..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/parser.py +++ /dev/null @@ -1,61 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -import sys -from datetime import datetime, timezone -from typing import Optional - -EPOCH_AS_FILETIME = 116444736000000000 # January 1, 1970 as MS filetime -HUNDREDS_OF_NANOSECONDS = 10000000 - -if sys.version_info < (3,): - def _str(value): - if isinstance(value, unicode): # pylint: disable=undefined-variable - return value.encode('utf-8') - - return str(value) -else: - _str = str - - -def _to_utc_datetime(value: datetime) -> str: - return value.strftime('%Y-%m-%dT%H:%M:%SZ') - -def _rfc_1123_to_datetime(rfc_1123: str) -> Optional[datetime]: - """Converts an RFC 1123 date string to a UTC datetime. - - :param str rfc_1123: The time and date in RFC 1123 format. - :returns: The time and date in UTC datetime format. - :rtype: datetime - """ - if not rfc_1123: - return None - - return datetime.strptime(rfc_1123, "%a, %d %b %Y %H:%M:%S %Z") - -def _filetime_to_datetime(filetime: str) -> Optional[datetime]: - """Converts an MS filetime string to a UTC datetime. "0" indicates None. - If parsing MS Filetime fails, tries RFC 1123 as backup. - - :param str filetime: The time and date in MS filetime format. - :returns: The time and date in UTC datetime format. - :rtype: datetime - """ - if not filetime: - return None - - # Try to convert to MS Filetime - try: - temp_filetime = int(filetime) - if temp_filetime == 0: - return None - - return datetime.fromtimestamp((temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc) - except ValueError: - pass - - # Try RFC 1123 as backup - return _rfc_1123_to_datetime(filetime) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py deleted file mode 100644 index 07de6ad5579e..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ /dev/null @@ -1,695 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import base64 -import hashlib -import logging -import random -import re -import uuid -from io import SEEK_SET, UnsupportedOperation -from time import time -from typing import Any, Dict, Optional, TYPE_CHECKING -from urllib.parse import ( - parse_qsl, - urlencode, - urlparse, - urlunparse, -) -from wsgiref.handlers import format_date_time - -from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError -from azure.core.pipeline.policies import ( - BearerTokenCredentialPolicy, - HeadersPolicy, - HTTPPolicy, - NetworkTraceLoggingPolicy, - RequestHistory, - SansIOHTTPPolicy -) - -from .authentication import AzureSigningError, StorageHttpChallenge -from .constants import DEFAULT_OAUTH_SCOPE -from .models import LocationMode - -if TYPE_CHECKING: - from azure.core.credentials import TokenCredential - from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import - PipelineRequest, - PipelineResponse - ) - - -_LOGGER = logging.getLogger(__name__) - - -def encode_base64(data): - if isinstance(data, str): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - -# Are we out of retries? 
-def is_exhausted(settings): - retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status']) - retry_counts = list(filter(None, retry_counts)) - if not retry_counts: - return False - return min(retry_counts) < 0 - - -def retry_hook(settings, **kwargs): - if settings['hook']: - settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs) - - -# Is this method/status code retryable? (Based on allowlists and control -# variables such as the number of total retries to allow, whether to -# respect the Retry-After header, whether this header is present, and -# whether the returned status code is on the list of status codes to -# be retried upon on the presence of the aforementioned header) -def is_retry(response, mode): # pylint: disable=too-many-return-statements - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occurred, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. - if status in [501, 505]: - return False - return True - return False - - -def is_checksum_retry(response): - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - - -def urljoin(base_url, stub_url): - parsed = urlparse(base_url) - parsed = parsed._replace(path=parsed.path + '/' + stub_url) - return parsed.geturl() - - -class QueueMessagePolicy(SansIOHTTPPolicy): - - def on_request(self, request): - message_id = request.context.options.pop('queue_message_id', None) - if message_id: - request.http_request.url = urljoin( - request.http_request.url, - message_id) - - -class StorageHeadersPolicy(HeadersPolicy): - request_id_header_name = 'x-ms-client-request-id' - - def on_request(self, request: "PipelineRequest") -> None: - super(StorageHeadersPolicy, self).on_request(request) - current_time = format_date_time(time()) - request.http_request.headers['x-ms-date'] = current_time - - custom_id = request.context.options.pop('client_request_id', None) - request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1()) - - # def on_response(self, request, response): - # # raise exception if the echoed client request id from the service is not identical to the one we sent - # if self.request_id_header_name in response.http_response.headers: - - # client_request_id = request.http_request.headers.get(self.request_id_header_name) - - # if response.http_response.headers[self.request_id_header_name] != client_request_id: - # raise AzureError( - # "Echoed client request ID: {} does not match sent client request ID: {}. 
" - # "Service request ID: {}".format( - # response.http_response.headers[self.request_id_header_name], client_request_id, - # response.http_response.headers['x-ms-request-id']), - # response=response.http_response - # ) - - -class StorageHosts(SansIOHTTPPolicy): - - def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument - self.hosts = hosts - super(StorageHosts, self).__init__() - - def on_request(self, request: "PipelineRequest") -> None: - request.context.options['hosts'] = self.hosts - parsed_url = urlparse(request.http_request.url) - - # Detect what location mode we're currently requesting with - location_mode = LocationMode.PRIMARY - for key, value in self.hosts.items(): - if parsed_url.netloc == value: - location_mode = key - - # See if a specific location mode has been specified, and if so, redirect - use_location = request.context.options.pop('use_location', None) - if use_location: - # Lock retries to the specific location - request.context.options['retry_to_secondary'] = False - if use_location not in self.hosts: - raise ValueError(f"Attempting to use undefined host location {use_location}") - if use_location != location_mode: - # Update request URL to use the specified location - updated = parsed_url._replace(netloc=self.hosts[use_location]) - request.http_request.url = updated.geturl() - location_mode = use_location - - request.context.options['location_mode'] = location_mode - - -class StorageLoggingPolicy(NetworkTraceLoggingPolicy): - """A policy that logs HTTP request and response to the DEBUG logger. - - This accepts both global configuration, and per-request level with "enable_http_logger" - """ - - def __init__(self, logging_enable: bool = False, **kwargs) -> None: - self.logging_body = kwargs.pop("logging_body", False) - super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs) - - def on_request(self, request: "PipelineRequest") -> None: - http_request = request.http_request - options = request.context.options - self.logging_body = self.logging_body or options.pop("logging_body", False) - if options.pop("logging_enable", self.enable_http_logger): - request.context["logging_enable"] = True - if not _LOGGER.isEnabledFor(logging.DEBUG): - return - - try: - log_url = http_request.url - query_params = http_request.query - if 'sig' in query_params: - log_url = log_url.replace(query_params['sig'], "sig=*****") - _LOGGER.debug("Request URL: %r", log_url) - _LOGGER.debug("Request method: %r", http_request.method) - _LOGGER.debug("Request headers:") - for header, value in http_request.headers.items(): - if header.lower() == 'authorization': - value = '*****' - elif header.lower() == 'x-ms-copy-source' and 'sig' in value: - # take the url apart and scrub away the signed signature - scheme, netloc, path, params, query, fragment = urlparse(value) - parsed_qs = dict(parse_qsl(query)) - parsed_qs['sig'] = '*****' - - # the SAS needs to be put back together - value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment)) - - _LOGGER.debug(" %r: %r", header, value) - _LOGGER.debug("Request body:") - - if self.logging_body: - _LOGGER.debug(str(http_request.body)) - else: - # We don't want to log the binary data of a file upload. 
- _LOGGER.debug("Hidden body, please use logging_body to show body") - except Exception as err: # pylint: disable=broad-except - _LOGGER.debug("Failed to log request: %r", err) - - def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: - if response.context.pop("logging_enable", self.enable_http_logger): - if not _LOGGER.isEnabledFor(logging.DEBUG): - return - - try: - _LOGGER.debug("Response status: %r", response.http_response.status_code) - _LOGGER.debug("Response headers:") - for res_header, value in response.http_response.headers.items(): - _LOGGER.debug(" %r: %r", res_header, value) - - # We don't want to log binary data if the response is a file. - _LOGGER.debug("Response content:") - pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) - header = response.http_response.headers.get('content-disposition') - resp_content_type = response.http_response.headers.get("content-type", "") - - if header and pattern.match(header): - filename = header.partition('=')[2] - _LOGGER.debug("File attachments: %s", filename) - elif resp_content_type.endswith("octet-stream"): - _LOGGER.debug("Body contains binary data.") - elif resp_content_type.startswith("image"): - _LOGGER.debug("Body contains image data.") - - if self.logging_body and resp_content_type.startswith("text"): - _LOGGER.debug(response.http_response.text()) - elif self.logging_body: - try: - _LOGGER.debug(response.http_response.body()) - except ValueError: - _LOGGER.debug("Body is streamable") - - except Exception as err: # pylint: disable=broad-except - _LOGGER.debug("Failed to log response: %s", repr(err)) - - -class StorageRequestHook(SansIOHTTPPolicy): - - def __init__(self, **kwargs): # pylint: disable=unused-argument - self._request_callback = kwargs.get('raw_request_hook') - super(StorageRequestHook, self).__init__() - - def on_request(self, request: "PipelineRequest") -> None: - request_callback = request.context.options.pop('raw_request_hook', self._request_callback) - if request_callback: - request_callback(request) - - -class StorageResponseHook(HTTPPolicy): - - def __init__(self, **kwargs): # pylint: disable=unused-argument - self._response_callback = kwargs.get('raw_response_hook') - super(StorageResponseHook, self).__init__() - - def send(self, request: "PipelineRequest") -> "PipelineResponse": - # Values could be 0 - data_stream_total = request.context.get('data_stream_total') - if data_stream_total is None: - data_stream_total = request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') - if download_stream_current is None: - download_stream_current = request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') - if upload_stream_current is None: - upload_stream_current = request.context.options.pop('upload_stream_current', None) - - response_callback = request.context.get('response_callback') or \ - request.context.options.pop('raw_response_hook', self._response_callback) - - response = self.next.send(request) - - will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) - # Auth error could come from Bearer challenge, in which case this request will be made again - is_auth_error = response.http_response.status_code == 401 - should_update_counts = not (will_retry or is_auth_error) - - if should_update_counts and download_stream_current is not None: - download_stream_current += 
int(response.http_response.headers.get('Content-Length', 0)) - if data_stream_total is None: - content_range = response.http_response.headers.get('Content-Range') - if content_range: - data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) - else: - data_stream_total = download_stream_current - elif should_update_counts and upload_stream_current is not None: - upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) - for pipeline_obj in [request, response]: - if hasattr(pipeline_obj, 'context'): - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current - if response_callback: - response_callback(response) - request.context['response_callback'] = response_callback - return response - - -class StorageContentValidation(SansIOHTTPPolicy): - """A simple policy that sends the given headers - with the request. - - This will overwrite any headers already defined in the request. - """ - header_name = 'Content-MD5' - - def __init__(self, **kwargs: Any) -> None: # pylint: disable=unused-argument - super(StorageContentValidation, self).__init__() - - @staticmethod - def get_content_md5(data): - # Since HTTP does not differentiate between no content and empty content, - # we have to perform a None check. - data = data or b"" - md5 = hashlib.md5() # nosec - if isinstance(data, bytes): - md5.update(data) - elif hasattr(data, 'read'): - pos = 0 - try: - pos = data.tell() - except: # pylint: disable=bare-except - pass - for chunk in iter(lambda: data.read(4096), b""): - md5.update(chunk) - try: - data.seek(pos, SEEK_SET) - except (AttributeError, IOError) as exc: - raise ValueError("Data should be bytes or a seekable file-like object.") from exc - else: - raise ValueError("Data should be bytes or a seekable file-like object.") - - return md5.digest() - - def on_request(self, request: "PipelineRequest") -> None: - validate_content = request.context.options.pop('validate_content', False) - if validate_content and request.http_request.method != 'GET': - computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data)) - request.http_request.headers[self.header_name] = computed_md5 - request.context['validate_content_md5'] = computed_md5 - request.context['validate_content'] = validate_content - - def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - computed_md5 = request.context.get('validate_content_md5') or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) - if response.http_response.headers['content-md5'] != computed_md5: - raise AzureError(( - f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', " - f"computed value is '{computed_md5}'."), - response=response.http_response - ) - - -class StorageRetryPolicy(HTTPPolicy): - """ - The base class for Exponential and Linear retries containing shared code. 
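-
-    Retry state is tracked per request in a plain settings dictionary built by
-    configure_retries below; a sketch of its shape, assuming the defaults::
-
-        settings = {
-            'total': 10, 'connect': 3, 'read': 3, 'status': 3,
-            'retry_secondary': False, 'mode': LocationMode.PRIMARY,
-            'hosts': None, 'hook': None, 'body_position': None,
-            'count': 0, 'history': [],
-        }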
- """ - - total_retries: int - """The max number of retries.""" - connect_retries: int - """The max number of connect retries.""" - retry_read: int - """The max number of read retries.""" - retry_status: int - """The max number of status retries.""" - retry_to_secondary: bool - """Whether the secondary endpoint should be retried.""" - - def __init__(self, **kwargs: Any) -> None: - self.total_retries = kwargs.pop('retry_total', 10) - self.connect_retries = kwargs.pop('retry_connect', 3) - self.read_retries = kwargs.pop('retry_read', 3) - self.status_retries = kwargs.pop('retry_status', 3) - self.retry_to_secondary = kwargs.pop('retry_to_secondary', False) - super(StorageRetryPolicy, self).__init__() - - def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None: - """ - A function which sets the next host location on the request, if applicable. - - :param Dict[str, Any]] settings: The configurable values pertaining to the next host location. - :param PipelineRequest request: A pipeline request object. - """ - if settings['hosts'] and all(settings['hosts'].values()): - url = urlparse(request.url) - # If there's more than one possible location, retry to the alternative - if settings['mode'] == LocationMode.PRIMARY: - settings['mode'] = LocationMode.SECONDARY - else: - settings['mode'] = LocationMode.PRIMARY - updated = url._replace(netloc=settings['hosts'].get(settings['mode'])) - request.url = updated.geturl() - - def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]: - body_position = None - if hasattr(request.http_request.body, 'read'): - try: - body_position = request.http_request.body.tell() - except (AttributeError, UnsupportedOperation): - # if body position cannot be obtained, then retries will not work - pass - options = request.context.options - return { - 'total': options.pop("retry_total", self.total_retries), - 'connect': options.pop("retry_connect", self.connect_retries), - 'read': options.pop("retry_read", self.read_retries), - 'status': options.pop("retry_status", self.status_retries), - 'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary), - 'mode': options.pop("location_mode", LocationMode.PRIMARY), - 'hosts': options.pop("hosts", None), - 'hook': options.pop("retry_hook", None), - 'body_position': body_position, - 'count': 0, - 'history': [] - } - - def get_backoff_time(self, settings: Dict[str, Any]) -> float: # pylint: disable=unused-argument - """ Formula for computing the current backoff. - Should be calculated by child class. - - :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. - :returns: The backoff time. - :rtype: float - """ - return 0 - - def sleep(self, settings, transport): - backoff = self.get_backoff_time(settings) - if not backoff or backoff < 0: - return - transport.sleep(backoff) - - def increment( - self, settings: Dict[str, Any], - request: "PipelineRequest", - response: Optional["PipelineResponse"] = None, - error: Optional[AzureError] = None - ) -> bool: - """Increment the retry counters. - - :param Dict[str, Any]] settings: The configurable values pertaining to the increment operation. - :param PipelineRequest request: A pipeline request object. - :param Optional[PipelineResponse] response: A pipeline response object. - :param error: An error encountered during the request, or - None if the response was received successfully. - :type error: Optional[AzureError] - :returns: Whether the retry attempts are exhausted. 
-        :rtype: bool
-        """
-        settings['total'] -= 1
-
-        if error and isinstance(error, ServiceRequestError):
-            # Errors when we're fairly sure that the server did not receive the
-            # request, so it should be safe to retry.
-            settings['connect'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
-
-        elif error and isinstance(error, ServiceResponseError):
-            # Errors that occur after the request has been started, so we should
-            # assume that the server began processing it.
-            settings['read'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
-
-        else:
-            # Incrementing because of a server error like a 500 in
-            # status_forcelist and the given method is in the allowlist
-            if response:
-                settings['status'] -= 1
-                settings['history'].append(RequestHistory(request, http_response=response))
-
-        if not is_exhausted(settings):
-            if request.method not in ['PUT'] and settings['retry_secondary']:
-                self._set_next_host_location(settings, request)
-
-            # rewind the request body if it is a stream
-            if request.body and hasattr(request.body, 'read'):
-                # no position was saved, then retry would not work
-                if settings['body_position'] is None:
-                    return False
-                try:
-                    # attempt to rewind the body to the initial position
-                    request.body.seek(settings['body_position'], SEEK_SET)
-                except (UnsupportedOperation, ValueError):
-                    # if body is not seekable, then retry would not work
-                    return False
-            settings['count'] += 1
-            return True
-        return False
-
-    def send(self, request):
-        retries_remaining = True
-        response = None
-        retry_settings = self.configure_retries(request)
-        while retries_remaining:
-            try:
-                response = self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or is_checksum_retry(response):
-                    retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
-                    if retries_remaining:
-                        retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
-                        self.sleep(retry_settings, request.context.transport)
-                        continue
-                break
-            except AzureError as err:
-                if isinstance(err, AzureSigningError):
-                    raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
-                if retries_remaining:
-                    retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
-                    self.sleep(retry_settings, request.context.transport)
-                    continue
-                raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
-        return response
-
-
-class ExponentialRetry(StorageRetryPolicy):
-    """Exponential retry."""
-
-    initial_backoff: int
-    """The initial backoff interval, in seconds, for the first retry."""
-    increment_base: int
-    """The base, in seconds, to increment the initial_backoff by after the
-    first retry."""
-    random_jitter_range: int
-    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
-
-    def __init__(
-        self, initial_backoff: int = 15,
-        increment_base: int = 3,
-        retry_total: int = 3,
-        retry_to_secondary: bool = False,
-        random_jitter_range: int = 3,
-        **kwargs: Any
-    ) -> None:
-        """
-        Constructs an Exponential retry object. The initial_backoff is used for
-        the first retry. Subsequent retries are retried after initial_backoff +
-        increment_base^retry_count seconds.
-
-        :param int initial_backoff:
-            The initial backoff interval, in seconds, for the first retry.
-        :param int increment_base:
-            The base, in seconds, to increment the initial_backoff by after the
-            first retry.
-        :param int retry_total:
-            The maximum number of retry attempts.
-        :param bool retry_to_secondary:
-            Whether the request should be retried to secondary, if able. This should
-            only be enabled if RA-GRS accounts are used and potentially stale data
-            can be handled.
-        :param int random_jitter_range:
-            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
-            For example, a random_jitter_range of 3 results in the back-off interval x varying between x-3 and x+3.
-        """
-        self.initial_backoff = initial_backoff
-        self.increment_base = increment_base
-        self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
-
-    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
-        """
-        Calculates how long to sleep before retrying.
-
-        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :returns:
-            A float indicating how long to wait before retrying the request,
-            or None to indicate no retry should be performed.
-        :rtype: float
-        """
-        random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
-        random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
-        random_range_end = backoff + self.random_jitter_range
-        return random_generator.uniform(random_range_start, random_range_end)
-
-
-class LinearRetry(StorageRetryPolicy):
-    """Linear retry."""
-
-    backoff: int
-    """The backoff interval, in seconds, between retries."""
-    random_jitter_range: int
-    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
-
-    def __init__(
-        self, backoff: int = 15,
-        retry_total: int = 3,
-        retry_to_secondary: bool = False,
-        random_jitter_range: int = 3,
-        **kwargs: Any
-    ) -> None:
-        """
-        Constructs a Linear retry object.
-
-        :param int backoff:
-            The backoff interval, in seconds, between retries.
-        :param int retry_total:
-            The maximum number of retry attempts.
-        :param bool retry_to_secondary:
-            Whether the request should be retried to secondary, if able. This should
-            only be enabled if RA-GRS accounts are used and potentially stale data
-            can be handled.
-        :param int random_jitter_range:
-            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
-            For example, a random_jitter_range of 3 results in the back-off interval x varying between x-3 and x+3.
-        """
-        self.backoff = backoff
-        self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
-
-    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
-        """
-        Calculates how long to sleep before retrying.
-
-        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :returns:
-            A float indicating how long to wait before retrying the request,
-            or None to indicate no retry should be performed.
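-            For example, with the default backoff=15 and random_jitter_range=3,
-            every computed delay falls in [12, 18] seconds; a minimal check::
-
-                policy = LinearRetry(backoff=15, random_jitter_range=3)
-                delay = policy.get_backoff_time({'count': 1})
-                assert 12 <= delay <= 18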
- :rtype: float - """ - random_generator = random.Random() - # the backoff interval normally does not change, however there is the possibility - # that it was modified by accessing the property directly after initializing the object - random_range_start = self.backoff - self.random_jitter_range \ - if self.backoff > self.random_jitter_range else 0 - random_range_end = self.backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) - - -class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy): - """ Custom Bearer token credential policy for following Storage Bearer challenges """ - - def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None: - super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) - - def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: - try: - auth_header = response.http_response.headers.get("WWW-Authenticate") - challenge = StorageHttpChallenge(auth_header) - except ValueError: - return False - - scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE - self.authorize_request(request, scope, tenant_id=challenge.tenant_id) - - return True diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py deleted file mode 100644 index 67987a090f33..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ /dev/null @@ -1,296 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method - -import asyncio -import logging -import random -from typing import Any, Dict, TYPE_CHECKING - -from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError -from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy - -from .authentication import AzureSigningError, StorageHttpChallenge -from .constants import DEFAULT_OAUTH_SCOPE -from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy - -if TYPE_CHECKING: - from azure.core.credentials_async import AsyncTokenCredential - from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import - PipelineRequest, - PipelineResponse - ) - - -_LOGGER = logging.getLogger(__name__) - - -async def retry_hook(settings, **kwargs): - if settings['hook']: - if asyncio.iscoroutine(settings['hook']): - await settings['hook']( - retry_count=settings['count'] - 1, - location_mode=settings['mode'], - **kwargs) - else: - settings['hook']( - retry_count=settings['count'] - 1, - location_mode=settings['mode'], - **kwargs) - - -async def is_checksum_retry(response): - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - - -class AsyncStorageResponseHook(AsyncHTTPPolicy): - - def __init__(self, **kwargs): # pylint: disable=unused-argument - self._response_callback = kwargs.get('raw_response_hook') - super(AsyncStorageResponseHook, self).__init__() - - async def send(self, request: "PipelineRequest") -> "PipelineResponse": - # Values could be 0 - data_stream_total = request.context.get('data_stream_total') - if data_stream_total is None: - data_stream_total = request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') - if download_stream_current is None: - download_stream_current = request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') - if upload_stream_current is None: - upload_stream_current = request.context.options.pop('upload_stream_current', None) - - response_callback = request.context.get('response_callback') or \ - request.context.options.pop('raw_response_hook', self._response_callback) - - response = await self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) - - # Auth error could come from Bearer challenge, in which case this request will be made again - is_auth_error = response.http_response.status_code == 401 - should_update_counts = not (will_retry or is_auth_error) - - if should_update_counts and download_stream_current is not None: - download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) - if data_stream_total is None: - content_range = response.http_response.headers.get('Content-Range') - if content_range: - data_stream_total = int(content_range.split(' ', 
1)[1].split('/', 1)[1]) - else: - data_stream_total = download_stream_current - elif should_update_counts and upload_stream_current is not None: - upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) - for pipeline_obj in [request, response]: - if hasattr(pipeline_obj, 'context'): - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current - if response_callback: - if asyncio.iscoroutine(response_callback): - await response_callback(response) # type: ignore - else: - response_callback(response) - request.context['response_callback'] = response_callback - return response - -class AsyncStorageRetryPolicy(StorageRetryPolicy): - """ - The base class for Exponential and Linear retries containing shared code. - """ - - async def sleep(self, settings, transport): - backoff = self.get_backoff_time(settings) - if not backoff or backoff < 0: - return - await transport.sleep(backoff) - - async def send(self, request): - retries_remaining = True - response = None - retry_settings = self.configure_retries(request) - while retries_remaining: - try: - response = await self.next.send(request) - if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): - retries_remaining = self.increment( - retry_settings, - request=request.http_request, - response=response.http_response) - if retries_remaining: - await retry_hook( - retry_settings, - request=request.http_request, - response=response.http_response, - error=None) - await self.sleep(retry_settings, request.context.transport) - continue - break - except AzureError as err: - if isinstance(err, AzureSigningError): - raise - retries_remaining = self.increment( - retry_settings, request=request.http_request, error=err) - if retries_remaining: - await retry_hook( - retry_settings, - request=request.http_request, - response=None, - error=err) - await self.sleep(retry_settings, request.context.transport) - continue - raise err - if retry_settings['history']: - response.context['history'] = retry_settings['history'] - response.http_response.location_mode = retry_settings['mode'] - return response - - -class ExponentialRetry(AsyncStorageRetryPolicy): - """Exponential retry.""" - - initial_backoff: int - """The initial backoff interval, in seconds, for the first retry.""" - increment_base: int - """The base, in seconds, to increment the initial_backoff by after the - first retry.""" - random_jitter_range: int - """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" - - def __init__( - self, - initial_backoff: int = 15, - increment_base: int = 3, - retry_total: int = 3, - retry_to_secondary: bool = False, - random_jitter_range: int = 3, **kwargs - ) -> None: - """ - Constructs an Exponential retry object. The initial_backoff is used for - the first retry. Subsequent retries are retried after initial_backoff + - increment_power^retry_count seconds. For example, by default the first retry - occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the - third after (15+3^2) = 24 seconds. - - :param int initial_backoff: - The initial backoff interval, in seconds, for the first retry. - :param int increment_base: - The base, in seconds, to increment the initial_backoff by after the - first retry. - :param int max_attempts: - The maximum number of retry attempts. 
-        :param bool retry_to_secondary:
-            Whether the request should be retried to secondary, if able. This should
-            only be enabled if RA-GRS accounts are used and potentially stale data
-            can be handled.
-        :param int random_jitter_range:
-            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
-            For example, a random_jitter_range of 3 results in the back-off interval x varying between x-3 and x+3.
-        """
-        self.initial_backoff = initial_backoff
-        self.increment_base = increment_base
-        self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
-
-    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
-        """
-        Calculates how long to sleep before retrying.
-
-        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :return:
-            An integer indicating how long to wait before retrying the request,
-            or None to indicate no retry should be performed.
-        :rtype: int or None
-        """
-        random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
-        random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
-        random_range_end = backoff + self.random_jitter_range
-        return random_generator.uniform(random_range_start, random_range_end)
-
-
-class LinearRetry(AsyncStorageRetryPolicy):
-    """Linear retry."""
-
-    backoff: int
-    """The backoff interval, in seconds, between retries."""
-    random_jitter_range: int
-    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
-
-    def __init__(
-        self, backoff: int = 15,
-        retry_total: int = 3,
-        retry_to_secondary: bool = False,
-        random_jitter_range: int = 3,
-        **kwargs: Any
-    ) -> None:
-        """
-        Constructs a Linear retry object.
-
-        :param int backoff:
-            The backoff interval, in seconds, between retries.
-        :param int retry_total:
-            The maximum number of retry attempts.
-        :param bool retry_to_secondary:
-            Whether the request should be retried to secondary, if able. This should
-            only be enabled if RA-GRS accounts are used and potentially stale data
-            can be handled.
-        :param int random_jitter_range:
-            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
-            For example, a random_jitter_range of 3 results in the back-off interval x varying between x-3 and x+3.
-        """
-        self.backoff = backoff
-        self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
-
-    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
-        """
-        Calculates how long to sleep before retrying.
-
-        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :return:
-            An integer indicating how long to wait before retrying the request,
-            or None to indicate no retry should be performed.
- :rtype: int or None - """ - random_generator = random.Random() - # the backoff interval normally does not change, however there is the possibility - # that it was modified by accessing the property directly after initializing the object - random_range_start = self.backoff - self.random_jitter_range \ - if self.backoff > self.random_jitter_range else 0 - random_range_end = self.backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) - - -class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy): - """ Custom Bearer token credential policy for following Storage Bearer challenges """ - - def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None: - super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) - - async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: - try: - auth_header = response.http_response.headers.get("WWW-Authenticate") - challenge = StorageHttpChallenge(auth_header) - except ValueError: - return False - - scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE - await self.authorize_request(request, scope, tenant_id=challenge.tenant_id) - - return True diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/request_handlers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/request_handlers.py deleted file mode 100644 index 99a483a019a3..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/request_handlers.py +++ /dev/null @@ -1,271 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from typing import Dict, Optional - -import logging -from os import fstat -import stat -from io import (SEEK_END, SEEK_SET, UnsupportedOperation) - -import isodate - - -_LOGGER = logging.getLogger(__name__) - -_REQUEST_DELIMITER_PREFIX = "batch_" -_HTTP1_1_IDENTIFIER = "HTTP/1.1" -_HTTP_LINE_ENDING = "\r\n" - - -def serialize_iso(attr): - """Serialize Datetime object into ISO-8601 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises: ValueError if format invalid. - """ - if not attr: - return None - if isinstance(attr, str): - attr = isodate.parse_datetime(attr) - try: - utc = attr.utctimetuple() - if utc.tm_year > 9999 or utc.tm_year < 1: - raise OverflowError("Hit max or min date") - - date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}" - return date + 'Z' - except (ValueError, OverflowError) as err: - raise ValueError("Unable to serialize datetime object.") from err - except AttributeError as err: - raise TypeError("ISO-8601 object must be valid datetime object.") from err - -def get_length(data): - length = None - # Check if object implements the __len__ method, covers most input cases such as bytearray. - try: - length = len(data) - except: # pylint: disable=bare-except - pass - - if not length: - # Check if the stream is a file-like stream object. - # If so, calculate the size using the file descriptor. 
-        try:
-            fileno = data.fileno()
-        except (AttributeError, UnsupportedOperation):
-            pass
-        else:
-            try:
-                mode = fstat(fileno).st_mode
-                if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
-                    # st_size only meaningful if regular file or symlink, other types
-                    # e.g. sockets may return misleading sizes like 0
-                    return fstat(fileno).st_size
-            except OSError:
-                # Not a valid fileno, may be possible requests returned
-                # a socket number?
-                pass
-
-        # If the stream is seekable and tell() is implemented, calculate the stream size.
-        try:
-            current_position = data.tell()
-            data.seek(0, SEEK_END)
-            length = data.tell() - current_position
-            data.seek(current_position, SEEK_SET)
-        except (AttributeError, OSError, UnsupportedOperation):
-            pass
-
-    return length
-
-
-def read_length(data):
-    try:
-        if hasattr(data, 'read'):
-            read_data = b''
-            for chunk in iter(lambda: data.read(4096), b""):
-                read_data += chunk
-            return len(read_data), read_data
-        if hasattr(data, '__iter__'):
-            read_data = b''
-            for chunk in data:
-                read_data += chunk
-            return len(read_data), read_data
-    except:  # pylint: disable=bare-except
-        pass
-    raise ValueError("Unable to calculate content length, please specify.")
-
-
-def validate_and_format_range_headers(
-        start_range, end_range, start_range_required=True,
-        end_range_required=True, check_content_md5=False, align_to_page=False):
-    # If end range is provided, start range must be provided
-    if (start_range_required or end_range is not None) and start_range is None:
-        raise ValueError("start_range value cannot be None.")
-    if end_range_required and end_range is None:
-        raise ValueError("end_range value cannot be None.")
-
-    # Page ranges must be 512 aligned
-    if align_to_page:
-        if start_range is not None and start_range % 512 != 0:
-            raise ValueError(f"Invalid page blob start_range: {start_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
-        if end_range is not None and end_range % 512 != 511:
-            raise ValueError(f"Invalid page blob end_range: {end_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
-
-    # Format based on whether end_range is present
-    range_header = None
-    if end_range is not None:
-        range_header = f'bytes={start_range}-{end_range}'
-    elif start_range is not None:
-        range_header = f"bytes={start_range}-"
-
-    # Content MD5 can only be provided for a complete range less than 4MB in size
-    range_validation = None
-    if check_content_md5:
-        if start_range is None or end_range is None:
-            raise ValueError("Both start and end range required for MD5 content validation.")
-        if end_range - start_range > 4 * 1024 * 1024:
-            raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
-        range_validation = 'true'
-
-    return range_header, range_validation
-
-
-def add_metadata_headers(metadata=None):
-    # type: (Optional[Dict[str, str]]) -> Dict[str, str]
-    headers = {}
-    if metadata:
-        for key, value in metadata.items():
-            headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value
-    return headers
-
-
-def serialize_batch_body(requests, batch_id):
-    """
-    --<delimiter>
-    <subrequest>
-    --<delimiter>
-    <subrequest>    (repeated as needed)
-    --<delimiter>--
-
-    Serializes the requests in this batch to a single HTTP mixed/multipart body.
-
-    :param List[~azure.core.pipeline.transport.HttpRequest] requests:
-        a list of sub-requests for the batch request
-    :param str batch_id:
-        to be embedded in batch sub-request delimiter
-    :returns: The body bytes for this batch.
-    :rtype: bytes
-    """
-
-    if requests is None or len(requests) == 0:
-        raise ValueError('Please provide sub-request(s) for this batch request')
-
-    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
-    newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
-    batch_body = []
-
-    content_index = 0
-    for request in requests:
-        request.headers.update({
-            "Content-ID": str(content_index),
-            "Content-Length": str(0)
-        })
-        batch_body.append(delimiter_bytes)
-        batch_body.append(_make_body_from_sub_request(request))
-        batch_body.append(newline_bytes)
-        content_index += 1
-
-    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8'))
-    # final line of body MUST have \r\n at the end, or it will not be properly read by the service
-    batch_body.append(newline_bytes)
-
-    return bytes().join(batch_body)
-
-
-def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False):
-    """
-    Gets the delimiter used for this batch request's mixed/multipart HTTP format.
-
-    :param str batch_id:
-        Randomly generated id
-    :param bool is_prepend_dashes:
-        Whether to include the starting dashes. Used in the body, but not on defining the delimiter.
-    :param bool is_append_dashes:
-        Whether to include the ending dashes. Used in the body on the closing delimiter only.
-    :returns: The delimiter, WITHOUT a trailing newline.
-    :rtype: str
-    """
-
-    prepend_dashes = '--' if is_prepend_dashes else ''
-    append_dashes = '--' if is_append_dashes else ''
-
-    return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes
-
-
-def _make_body_from_sub_request(sub_request):
-    """
-    Content-Type: application/http
-    Content-ID: <sub-request-id>
-    Content-Transfer-Encoding: <value> (if present)
-
-    <verb> <path><query> HTTP/<version>
-    <header key>: <header value> (repeated as necessary)
-    Content-Length: <value>
-    (newline if content length > 0)
-    <body> (if content length > 0)
-
-    Serializes an http request.
-
-    :param ~azure.core.pipeline.transport.HttpRequest sub_request:
-        Request to serialize.
-    :returns: The serialized sub-request in bytes
-    :rtype: bytes
-    """
-
-    # put the sub-request's headers into a list for efficient str concatenation
-    sub_request_body = []
-
-    # get headers for ease of manipulation; remove headers as they are used
-    headers = sub_request.headers
-
-    # append opening headers
-    sub_request_body.append("Content-Type: application/http")
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    sub_request_body.append("Content-ID: ")
-    sub_request_body.append(headers.pop("Content-ID", ""))
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    sub_request_body.append("Content-Transfer-Encoding: binary")
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    # append blank line
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    # append HTTP verb and path and query and HTTP version
-    sub_request_body.append(sub_request.method)
-    sub_request_body.append(' ')
-    sub_request_body.append(sub_request.url)
-    sub_request_body.append(' ')
-    sub_request_body.append(_HTTP1_1_IDENTIFIER)
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    # append remaining headers (this will set the Content-Length, as it was set on `sub-request`)
-    for header_name, header_value in headers.items():
-        if header_value is not None:
-            sub_request_body.append(header_name)
-            sub_request_body.append(": ")
-            sub_request_body.append(header_value)
-            sub_request_body.append(_HTTP_LINE_ENDING)
-
-    # append blank line
-    sub_request_body.append(_HTTP_LINE_ENDING)
-
-    return ''.join(sub_request_body).encode()
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py
deleted file mode 100644
index 432aa2666ace..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-import logging
-from typing import NoReturn
-from xml.etree.ElementTree import Element
-
-from azure.core.exceptions import (
-    ClientAuthenticationError,
-    DecodeError,
-    HttpResponseError,
-    ResourceExistsError,
-    ResourceModifiedError,
-    ResourceNotFoundError,
-)
-from azure.core.pipeline.policies import ContentDecodePolicy
-
-from .authentication import AzureSigningError
-from .models import get_enum_value, StorageErrorCode, UserDelegationKey
-from .parser import _to_utc_datetime
-
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class PartialBatchErrorException(HttpResponseError):
-    """There is a partial failure in batch operations.
-
-    :param str message: The message of the exception.
-    :param response: Server response to be deserialized.
-    :param list parts: A list of the parts in multipart response.
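-
-    For example, a batched delete surfaces per-entry failures through this
-    exception when raise_on_any_failure is left enabled; a minimal sketch,
-    with hypothetical blob names::
-
-        try:
-            container_client.delete_blobs("a.txt", "b.txt")
-        except PartialBatchErrorException as error:
-            for part in error.parts:
-                if part.status_code not in (202, 404):
-                    print("sub-request failed:", part.status_code)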
- """ - - def __init__(self, message, response, parts): - self.parts = parts - super(PartialBatchErrorException, self).__init__(message=message, response=response) - - -# Parses the blob length from the content range header: bytes 1-3/65537 -def parse_length_from_content_range(content_range): - if content_range is None: - return None - - # First, split in space and take the second half: '1-3/65537' - # Next, split on slash and take the second half: '65537' - # Finally, convert to an int: 65537 - return int(content_range.split(' ', 1)[1].split('/', 1)[1]) - - -def normalize_headers(headers): - normalized = {} - for key, value in headers.items(): - if key.startswith('x-ms-'): - key = key[5:] - normalized[key.lower().replace('-', '_')] = get_enum_value(value) - return normalized - - -def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument - try: - raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")} - except AttributeError: - raw_metadata = {k: v for k, v in response.headers.items() if k.startswith("x-ms-meta-")} - return {k[10:]: v for k, v in raw_metadata.items()} - - -def return_response_headers(response, deserialized, response_headers): # pylint: disable=unused-argument - return normalize_headers(response_headers) - - -def return_headers_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument - return normalize_headers(response_headers), deserialized - - -def return_context_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument - return response.http_response.location_mode, deserialized - - -def return_raw_deserialized(response, *_): - return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME] - - -def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches - raise_error = HttpResponseError - serialized = False - if isinstance(storage_error, AzureSigningError): - storage_error.message = storage_error.message + \ - '. This is likely due to an invalid shared key. Please check your shared key and try again.' - if not storage_error.response or storage_error.response.status_code in [200, 204]: - raise storage_error - # If it is one of those three then it has been serialized prior by the generated layer. 
- if isinstance(storage_error, (PartialBatchErrorException, - ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)): - serialized = True - error_code = storage_error.response.headers.get('x-ms-error-code') - error_message = storage_error.message - additional_data = {} - error_dict = {} - try: - error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) - try: - if error_body is None or len(error_body) == 0: - error_body = storage_error.response.reason - except AttributeError: - error_body = '' - # If it is an XML response - if isinstance(error_body, Element): - error_dict = { - child.tag.lower(): child.text - for child in error_body - } - # If it is a JSON response - elif isinstance(error_body, dict): - error_dict = error_body.get('error', {}) - elif not error_code: - _LOGGER.warning( - 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) - error_dict = {'message': str(error_body)} - - # If we extracted from a Json or XML response - # There is a chance error_dict is just a string - if error_dict and isinstance(error_dict, dict): - error_code = error_dict.get('code') - error_message = error_dict.get('message') - additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} - except DecodeError: - pass - - try: - # This check would be unnecessary if we have already serialized the error - if error_code and not serialized: - error_code = StorageErrorCode(error_code) - if error_code in [StorageErrorCode.condition_not_met, - StorageErrorCode.blob_overwritten]: - raise_error = ResourceModifiedError - if error_code in [StorageErrorCode.invalid_authentication_info, - StorageErrorCode.authentication_failed]: - raise_error = ClientAuthenticationError - if error_code in [StorageErrorCode.resource_not_found, - StorageErrorCode.cannot_verify_copy_source, - StorageErrorCode.blob_not_found, - StorageErrorCode.queue_not_found, - StorageErrorCode.container_not_found, - StorageErrorCode.parent_not_found, - StorageErrorCode.share_not_found]: - raise_error = ResourceNotFoundError - if error_code in [StorageErrorCode.account_already_exists, - StorageErrorCode.account_being_created, - StorageErrorCode.resource_already_exists, - StorageErrorCode.resource_type_mismatch, - StorageErrorCode.blob_already_exists, - StorageErrorCode.queue_already_exists, - StorageErrorCode.container_already_exists, - StorageErrorCode.container_being_deleted, - StorageErrorCode.queue_being_deleted, - StorageErrorCode.share_already_exists, - StorageErrorCode.share_being_deleted]: - raise_error = ResourceExistsError - except ValueError: - # Got an unknown error code - pass - - # Error message should include all the error properties - try: - error_message += f"\nErrorCode:{error_code.value}" - except AttributeError: - error_message += f"\nErrorCode:{error_code}" - for name, info in additional_data.items(): - error_message += f"\n{name}:{info}" - - # No need to create an instance if it has already been serialized by the generated layer - if serialized: - storage_error.message = error_message - error = storage_error - else: - error = raise_error(message=error_message, response=storage_error.response) - # Ensure these properties are stored in the error instance as well (not just the error message) - error.error_code = error_code - error.additional_info = additional_data - # error.args is what's surfaced on the traceback - show error message in all cases - error.args = (error.message,) - try: - # `from None` prevents us 
from double printing the exception (suppresses generated layer error context) - exec("raise error from None") # pylint: disable=exec-used # nosec - except SyntaxError as exc: - raise error from exc - - -def parse_to_internal_user_delegation_key(service_user_delegation_key): - internal_user_delegation_key = UserDelegationKey() - internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid - internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid - internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) - internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) - internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service - internal_user_delegation_key.signed_version = service_user_delegation_key.signed_version - internal_user_delegation_key.value = service_user_delegation_key.value - return internal_user_delegation_key diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py deleted file mode 100644 index 21ba30240cb6..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py +++ /dev/null @@ -1,252 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -from datetime import date - -from .parser import _str, _to_utc_datetime -from .constants import X_MS_VERSION -from . import sign_string, url_quote - -# cspell:ignoreRegExp rsc. 
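The `exec("raise error from None")` dance above exists, as far as we can tell, because `raise ... from None` was Python 3-only syntax and the module once had to at least parse under Python 2; the source does not state this, so treat it as our reading. What `from None` buys is context suppression, which a plain-Python snippet demonstrates:

```python
# `raise ... from None` suppresses the implicit chaining message
# ("During handling of the above exception, another exception occurred"),
# so the caller only sees the mapped error's traceback.
def mapped_raise():
    try:
        raise KeyError("original, generated-layer error")
    except KeyError:
        raise ValueError("mapped storage error") from None  # no chained context

try:
    mapped_raise()
except ValueError as e:
    # the original exception is still recorded, but printing is suppressed
    assert e.__context__ is not None and e.__suppress_context__
```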
-# cspell:ignoreRegExp s..?id -class QueryStringConstants(object): - SIGNED_SIGNATURE = 'sig' - SIGNED_PERMISSION = 'sp' - SIGNED_START = 'st' - SIGNED_EXPIRY = 'se' - SIGNED_RESOURCE = 'sr' - SIGNED_IDENTIFIER = 'si' - SIGNED_IP = 'sip' - SIGNED_PROTOCOL = 'spr' - SIGNED_VERSION = 'sv' - SIGNED_CACHE_CONTROL = 'rscc' - SIGNED_CONTENT_DISPOSITION = 'rscd' - SIGNED_CONTENT_ENCODING = 'rsce' - SIGNED_CONTENT_LANGUAGE = 'rscl' - SIGNED_CONTENT_TYPE = 'rsct' - START_PK = 'spk' - START_RK = 'srk' - END_PK = 'epk' - END_RK = 'erk' - SIGNED_RESOURCE_TYPES = 'srt' - SIGNED_SERVICES = 'ss' - SIGNED_OID = 'skoid' - SIGNED_TID = 'sktid' - SIGNED_KEY_START = 'skt' - SIGNED_KEY_EXPIRY = 'ske' - SIGNED_KEY_SERVICE = 'sks' - SIGNED_KEY_VERSION = 'skv' - SIGNED_ENCRYPTION_SCOPE = 'ses' - - # for ADLS - SIGNED_AUTHORIZED_OID = 'saoid' - SIGNED_UNAUTHORIZED_OID = 'suoid' - SIGNED_CORRELATION_ID = 'scid' - SIGNED_DIRECTORY_DEPTH = 'sdd' - - @staticmethod - def to_list(): - return [ - QueryStringConstants.SIGNED_SIGNATURE, - QueryStringConstants.SIGNED_PERMISSION, - QueryStringConstants.SIGNED_START, - QueryStringConstants.SIGNED_EXPIRY, - QueryStringConstants.SIGNED_RESOURCE, - QueryStringConstants.SIGNED_IDENTIFIER, - QueryStringConstants.SIGNED_IP, - QueryStringConstants.SIGNED_PROTOCOL, - QueryStringConstants.SIGNED_VERSION, - QueryStringConstants.SIGNED_CACHE_CONTROL, - QueryStringConstants.SIGNED_CONTENT_DISPOSITION, - QueryStringConstants.SIGNED_CONTENT_ENCODING, - QueryStringConstants.SIGNED_CONTENT_LANGUAGE, - QueryStringConstants.SIGNED_CONTENT_TYPE, - QueryStringConstants.START_PK, - QueryStringConstants.START_RK, - QueryStringConstants.END_PK, - QueryStringConstants.END_RK, - QueryStringConstants.SIGNED_RESOURCE_TYPES, - QueryStringConstants.SIGNED_SERVICES, - QueryStringConstants.SIGNED_OID, - QueryStringConstants.SIGNED_TID, - QueryStringConstants.SIGNED_KEY_START, - QueryStringConstants.SIGNED_KEY_EXPIRY, - QueryStringConstants.SIGNED_KEY_SERVICE, - QueryStringConstants.SIGNED_KEY_VERSION, - QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, - # for ADLS - QueryStringConstants.SIGNED_AUTHORIZED_OID, - QueryStringConstants.SIGNED_UNAUTHORIZED_OID, - QueryStringConstants.SIGNED_CORRELATION_ID, - QueryStringConstants.SIGNED_DIRECTORY_DEPTH, - ] - - -class SharedAccessSignature(object): - ''' - Provides a factory for creating account access - signature tokens with an account name and account key. Users can either - use the factory or can construct the appropriate service and use the - generate_*_shared_access_signature method directly. - ''' - - def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION): - ''' - :param str account_name: - The storage account name used to generate the shared access signatures. - :param str account_key: - The access key to generate the shared access signatures. - :param str x_ms_version: - The service version used to generate the shared access signatures. - ''' - self.account_name = account_name - self.account_key = account_key - self.x_ms_version = x_ms_version - - def generate_account( - self, services, - resource_types, - permission, - expiry, - start=None, - ip=None, - protocol=None, - sts_hook=None, - **kwargs - ) -> str: - ''' - Generates a shared access signature for the account. - Use the returned signature with the sas_token parameter of the service - or to create a new account object. - - :param Any services: The specified services associated with the shared access signature.
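To make the abbreviations above concrete, this is how a handful of them surface in a finished token's query string (the token below is fabricated and unsigned, for illustration only):

```python
from urllib.parse import parse_qs

sample_sas = "sv=2025-01-05&ss=b&srt=sco&sp=rl&se=2025-06-01T00:00:00Z&sig=FAKE"
parsed = {k: v[0] for k, v in parse_qs(sample_sas).items()}
assert parsed["sp"] == "rl"    # SIGNED_PERMISSION: read + list
assert parsed["ss"] == "b"     # SIGNED_SERVICES: blob
assert parsed["srt"] == "sco"  # SIGNED_RESOURCE_TYPES: service/container/object
```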
- :param ResourceTypes resource_types: - Specifies the resource types that are accessible with the account - SAS. You can combine values to provide access to more than one - resource type. - :param AccountSasPermissions permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. You can combine - values to provide more than one permission. - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. The provided datetime will always - be interpreted as UTC. - :type start: datetime or str - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :param str protocol: - Specifies the protocol permitted for a request made. The default value - is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. - :keyword str encryption_scope: - Optional. If specified, this is the encryption scope to use when sending requests - authorized with this SAS URI. - :param sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :type sts_hook: Optional[Callable[[str], None]] - :returns: The generated SAS token for the account. 
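A hypothetical end-to-end call of this factory, assuming it runs in the context of this module (the account name and base64 key are placeholders; in the public API this path is wrapped by `generate_account_sas`):

```python
from datetime import datetime, timedelta, timezone

sas = SharedAccessSignature("myaccount", "bXlrZXk=")  # fake base64 key
token = sas.generate_account(
    services="b",          # blob service only
    resource_types="sco",  # service, container, and object level
    permission="rl",       # read + list
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
# token is a query string built from the abbreviated parameters,
# ending in '&sig=<base64 HMAC of the string-to-sign>'
```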
- :rtype: str - ''' - sas = _SharedAccessHelper() - sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) - sas.add_account(services, resource_types) - sas.add_encryption_scope(**kwargs) - sas.add_account_signature(self.account_name, self.account_key) - - if sts_hook is not None: - sts_hook(sas.string_to_sign) - - return sas.get_token() - - -class _SharedAccessHelper(object): - def __init__(self): - self.query_dict = {} - self.string_to_sign = "" - - def _add_query(self, name, val): - if val: - self.query_dict[name] = _str(val) if val is not None else None - - def add_encryption_scope(self, **kwargs): - self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) - - def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): - if isinstance(start, date): - start = _to_utc_datetime(start) - - if isinstance(expiry, date): - expiry = _to_utc_datetime(expiry) - - self._add_query(QueryStringConstants.SIGNED_START, start) - self._add_query(QueryStringConstants.SIGNED_EXPIRY, expiry) - self._add_query(QueryStringConstants.SIGNED_PERMISSION, permission) - self._add_query(QueryStringConstants.SIGNED_IP, ip) - self._add_query(QueryStringConstants.SIGNED_PROTOCOL, protocol) - self._add_query(QueryStringConstants.SIGNED_VERSION, x_ms_version) - - def add_resource(self, resource): - self._add_query(QueryStringConstants.SIGNED_RESOURCE, resource) - - def add_id(self, policy_id): - self._add_query(QueryStringConstants.SIGNED_IDENTIFIER, policy_id) - - def add_account(self, services, resource_types): - self._add_query(QueryStringConstants.SIGNED_SERVICES, services) - self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types) - - def add_override_response_headers(self, cache_control, - content_disposition, - content_encoding, - content_language, - content_type): - self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control) - self._add_query(QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition) - self._add_query(QueryStringConstants.SIGNED_CONTENT_ENCODING, content_encoding) - self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) - self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) - - def add_account_signature(self, account_name, account_key): - def get_value_to_append(query): - return_value = self.query_dict.get(query) or '' - return return_value + '\n' - - string_to_sign = \ - (account_name + '\n' + - get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + - get_value_to_append(QueryStringConstants.SIGNED_SERVICES) + - get_value_to_append(QueryStringConstants.SIGNED_RESOURCE_TYPES) + - get_value_to_append(QueryStringConstants.SIGNED_START) + - get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + - get_value_to_append(QueryStringConstants.SIGNED_IP) + - get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + - get_value_to_append(QueryStringConstants.SIGNED_VERSION) + - get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) - - self._add_query(QueryStringConstants.SIGNED_SIGNATURE, - sign_string(account_key, string_to_sign)) - self.string_to_sign = string_to_sign - - def get_token(self) -> str: - return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None]) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py deleted file mode 100644 index b889a8622423..000000000000 --- 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py +++ /dev/null @@ -1,604 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from concurrent import futures -from io import BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation -from itertools import islice -from math import ceil -from threading import Lock - -from azure.core.tracing.common import with_current_context - -from .import encode_base64, url_quote -from .request_handlers import get_length -from .response_handlers import return_response_headers - - -_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 -_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = "{0} should be a seekable file-like/io.IOBase type stream object." - - -def _parallel_uploads(executor, uploader, pending, running): - range_ids = [] - while True: - # Wait for some download to finish before adding a new one - done, running = futures.wait(running, return_when=futures.FIRST_COMPLETED) - range_ids.extend([chunk.result() for chunk in done]) - try: - for _ in range(0, len(done)): - next_chunk = next(pending) - running.add(executor.submit(with_current_context(uploader), next_chunk)) - except StopIteration: - break - - # Wait for the remaining uploads to finish - done, _running = futures.wait(running) - range_ids.extend([chunk.result() for chunk in done]) - return range_ids - - -def upload_data_chunks( - service=None, - uploader_class=None, - total_size=None, - chunk_size=None, - max_concurrency=None, - stream=None, - validate_content=None, - progress_hook=None, - **kwargs): - - parallel = max_concurrency > 1 - if parallel and 'modified_access_conditions' in kwargs: - # Access conditions do not work with parallelism - kwargs['modified_access_conditions'] = None - - uploader = uploader_class( - service=service, - total_size=total_size, - chunk_size=chunk_size, - stream=stream, - parallel=parallel, - validate_content=validate_content, - progress_hook=progress_hook, - **kwargs) - if parallel: - with futures.ThreadPoolExecutor(max_concurrency) as executor: - upload_tasks = uploader.get_chunk_streams() - running_futures = [ - executor.submit(with_current_context(uploader.process_chunk), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) - else: - range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()] - if any(range_ids): - return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] - return uploader.response_headers - - -def upload_substream_blocks( - service=None, - uploader_class=None, - total_size=None, - chunk_size=None, - max_concurrency=None, - stream=None, - progress_hook=None, - **kwargs): - parallel = max_concurrency > 1 - if parallel and 'modified_access_conditions' in kwargs: - # Access conditions do not work with parallelism - kwargs['modified_access_conditions'] = None - uploader = uploader_class( - service=service, - total_size=total_size, - chunk_size=chunk_size, - stream=stream, - parallel=parallel, - progress_hook=progress_hook, - **kwargs) - - if parallel: - with futures.ThreadPoolExecutor(max_concurrency) as executor: - upload_tasks = uploader.get_substream_blocks() - running_futures = [ - 
executor.submit(with_current_context(uploader.process_substream_block), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) - else: - range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] - if any(range_ids): - return sorted(range_ids) - return [] - - -class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - - def __init__( - self, service, - total_size, - chunk_size, - stream, - parallel, - encryptor=None, - padder=None, - progress_hook=None, - **kwargs): - self.service = service - self.total_size = total_size - self.chunk_size = chunk_size - self.stream = stream - self.parallel = parallel - - # Stream management - self.stream_lock = Lock() if parallel else None - - # Progress feedback - self.progress_total = 0 - self.progress_lock = Lock() if parallel else None - self.progress_hook = progress_hook - - # Encryption - self.encryptor = encryptor - self.padder = padder - self.response_headers = None - self.etag = None - self.last_modified = None - self.request_options = kwargs - - def get_chunk_streams(self): - index = 0 - while True: - data = b"" - read_size = self.chunk_size - - # Buffer until we either reach the end of the stream or get a whole chunk. - while True: - if self.total_size: - read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) - temp = self.stream.read(read_size) - if not isinstance(temp, bytes): - raise TypeError("Blob data should be of type bytes.") - data += temp or b"" - - # We have read an empty string and so are at the end - # of the buffer or we have read a full chunk. - if temp == b"" or len(data) == self.chunk_size: - break - - if len(data) == self.chunk_size: - if self.padder: - data = self.padder.update(data) - if self.encryptor: - data = self.encryptor.update(data) - yield index, data - else: - if self.padder: - data = self.padder.update(data) + self.padder.finalize() - if self.encryptor: - data = self.encryptor.update(data) + self.encryptor.finalize() - if data: - yield index, data - break - index += len(data) - - def process_chunk(self, chunk_data): - chunk_bytes = chunk_data[1] - chunk_offset = chunk_data[0] - return self._upload_chunk_with_progress(chunk_offset, chunk_bytes) - - def _update_progress(self, length): - if self.progress_lock is not None: - with self.progress_lock: - self.progress_total += length - else: - self.progress_total += length - - if self.progress_hook: - self.progress_hook(self.progress_total, self.total_size) - - def _upload_chunk(self, chunk_offset, chunk_data): - raise NotImplementedError("Must be implemented by child class.") - - def _upload_chunk_with_progress(self, chunk_offset, chunk_data): - range_id = self._upload_chunk(chunk_offset, chunk_data) - self._update_progress(len(chunk_data)) - return range_id - - def get_substream_blocks(self): - assert self.chunk_size is not None - lock = self.stream_lock - blob_length = self.total_size - - if blob_length is None: - blob_length = get_length(self.stream) - if blob_length is None: - raise ValueError("Unable to determine content length of upload data.") - - blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) - last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size - - for i in range(blocks): - index = i * self.chunk_size - length = last_block_size if i == blocks - 1 else self.chunk_size - yield index, SubStream(self.stream, 
index, length, lock) - - def process_substream_block(self, block_data): - return self._upload_substream_block_with_progress(block_data[0], block_data[1]) - - def _upload_substream_block(self, index, block_stream): - raise NotImplementedError("Must be implemented by child class.") - - def _upload_substream_block_with_progress(self, index, block_stream): - range_id = self._upload_substream_block(index, block_stream) - self._update_progress(len(block_stream)) - return range_id - - def set_response_properties(self, resp): - self.etag = resp.etag - self.last_modified = resp.last_modified - - -class BlockBlobChunkUploader(_ChunkUploader): - - def __init__(self, *args, **kwargs): - kwargs.pop("modified_access_conditions", None) - super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) - self.current_length = None - - def _upload_chunk(self, chunk_offset, chunk_data): - # TODO: This is incorrect, but works with recording. - index = f'{chunk_offset:032d}' - block_id = encode_base64(url_quote(encode_base64(index))) - self.service.stage_block( - block_id, - len(chunk_data), - chunk_data, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - return index, block_id - - def _upload_substream_block(self, index, block_stream): - try: - block_id = f'BlockId{(index//self.chunk_size):05}' - self.service.stage_block( - block_id, - len(block_stream), - block_stream, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - finally: - block_stream.close() - return block_id - - -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def _is_chunk_empty(self, chunk_data): - # read until non-zero byte is encountered - # if reached the end without returning, then chunk_data is all 0's - return not any(bytearray(chunk_data)) - - def _upload_chunk(self, chunk_offset, chunk_data): - # avoid uploading the empty pages - if not self._is_chunk_empty(chunk_data): - chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = f"bytes={chunk_offset}-{chunk_end}" - computed_md5 = None - self.response_headers = self.service.upload_pages( - body=chunk_data, - content_length=len(chunk_data), - transactional_content_md5=computed_md5, - range=content_range, - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - - if not self.parallel and self.request_options.get('modified_access_conditions'): - self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] - - def _upload_substream_block(self, index, block_stream): - pass - - -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def __init__(self, *args, **kwargs): - super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) - self.current_length = None - - def _upload_chunk(self, chunk_offset, chunk_data): - if self.current_length is None: - self.response_headers = self.service.append_block( - body=chunk_data, - content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - self.current_length = int(self.response_headers["blob_append_offset"]) - else: - self.request_options['append_position_access_conditions'].append_position = \ - self.current_length + chunk_offset - self.response_headers = self.service.append_block( - body=chunk_data, - 
content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - - def _upload_substream_block(self, index, block_stream): - pass - - -class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def _upload_chunk(self, chunk_offset, chunk_data): - # avoid uploading the empty pages - self.response_headers = self.service.append_data( - body=chunk_data, - position=chunk_offset, - content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - - if not self.parallel and self.request_options.get('modified_access_conditions'): - self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] - - def _upload_substream_block(self, index, block_stream): - try: - self.service.append_data( - body=block_stream, - position=index, - content_length=len(block_stream), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - finally: - block_stream.close() - - -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def _upload_chunk(self, chunk_offset, chunk_data): - length = len(chunk_data) - chunk_end = chunk_offset + length - 1 - response = self.service.upload_range( - chunk_data, - chunk_offset, - length, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - return f'bytes={chunk_offset}-{chunk_end}', response - - # TODO: Implement this method. - def _upload_substream_block(self, index, block_stream): - pass - - -class SubStream(IOBase): - - def __init__(self, wrapped_stream, stream_begin_index, length, lockObj): - # Python 2.7: file-like objects created with open() typically support seek(), but are not - # derivations of io.IOBase and thus do not implement seekable(). - # Python > 3.0: file-like objects created with open() are derived from io.IOBase. 
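The constructor's first move in the next hunk is a zero-length relative seek whose only purpose is to validate the wrapped stream up front. Isolated into a standalone, stdlib-only sketch:

```python
# seek(0, SEEK_CUR) is a no-op that still raises on non-seekable objects,
# making it a cheap way to reject pipes and sockets before any upload work.
from io import BytesIO, SEEK_CUR

def assert_seekable(stream):
    try:
        stream.seek(0, SEEK_CUR)  # no-op seek; raises if seeking is unsupported
    except Exception as exc:
        raise ValueError("Wrapped stream must support seek().") from exc

assert_seekable(BytesIO(b"ok"))  # passes
# a pipe such as sys.stdin.buffer would typically raise here
```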
- try: - # only the main thread runs this, so there's no need grabbing the lock - wrapped_stream.seek(0, SEEK_CUR) - except Exception as exc: - raise ValueError("Wrapped stream must support seek().") from exc - - self._lock = lockObj - self._wrapped_stream = wrapped_stream - self._position = 0 - self._stream_begin_index = stream_begin_index - self._length = length - self._buffer = BytesIO() - - # we must avoid buffering more than necessary, and also not use up too much memory - # so the max buffer size is capped at 4MB - self._max_buffer_size = ( - length if length < _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE else _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE - ) - self._current_buffer_start = 0 - self._current_buffer_size = 0 - super(SubStream, self).__init__() - - def __len__(self): - return self._length - - def close(self): - if self._buffer: - self._buffer.close() - self._wrapped_stream = None - IOBase.close(self) - - def fileno(self): - return self._wrapped_stream.fileno() - - def flush(self): - pass - - def read(self, size=None): - if self.closed: # pylint: disable=using-constant-test - raise ValueError("Stream is closed.") - - if size is None: - size = self._length - self._position - - # adjust if out of bounds - if size + self._position >= self._length: - size = self._length - self._position - - # return fast - if size == 0 or self._buffer.closed: - return b"" - - # attempt first read from the read buffer and update position - read_buffer = self._buffer.read(size) - bytes_read = len(read_buffer) - bytes_remaining = size - bytes_read - self._position += bytes_read - - # repopulate the read buffer from the underlying stream to fulfill the request - # ensure the seek and read operations are done atomically (only if a lock is provided) - if bytes_remaining > 0: - with self._buffer: - # either read in the max buffer size specified on the class - # or read in just enough data for the current block/sub stream - current_max_buffer_size = min(self._max_buffer_size, self._length - self._position) - - # lock is only defined if max_concurrency > 1 (parallel uploads) - if self._lock: - with self._lock: - # reposition the underlying stream to match the start of the data to read - absolute_position = self._stream_begin_index + self._position - self._wrapped_stream.seek(absolute_position, SEEK_SET) - # If we can't seek to the right location, our read will be corrupted so fail fast. - if self._wrapped_stream.tell() != absolute_position: - raise IOError("Stream failed to seek to the desired location.") - buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) - else: - absolute_position = self._stream_begin_index + self._position - # It's possible that there's connection problem during data transfer, - # so when we retry we don't want to read from current position of wrapped stream, - # instead we should seek to where we want to read from. 
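The locked branch of `read` above pairs the absolute seek with the read so that parallel `SubStream` instances sharing one underlying file handle cannot interleave their seek/read pairs. Reduced to a standalone sketch (the names and window sizes are illustrative):

```python
import threading
from io import BytesIO, SEEK_SET

shared = BytesIO(b"0123456789abcdef")  # stands in for the wrapped stream
lock = threading.Lock()

def read_window(begin_index, offset, size):
    # reposition and read under one lock so concurrent windows stay consistent
    with lock:
        shared.seek(begin_index + offset, SEEK_SET)
        return shared.read(size)

assert read_window(4, 0, 4) == b"4567"  # window starting at absolute offset 4
```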
- if self._wrapped_stream.tell() != absolute_position: - self._wrapped_stream.seek(absolute_position, SEEK_SET) - - buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) - - if buffer_from_stream: - # update the buffer with new data from the wrapped stream - # we need to note down the start position and size of the buffer, in case seek is performed later - self._buffer = BytesIO(buffer_from_stream) - self._current_buffer_start = self._position - self._current_buffer_size = len(buffer_from_stream) - - # read the remaining bytes from the new buffer and update position - second_read_buffer = self._buffer.read(bytes_remaining) - read_buffer += second_read_buffer - self._position += len(second_read_buffer) - - return read_buffer - - def readable(self): - return True - - def readinto(self, b): - raise UnsupportedOperation - - def seek(self, offset, whence=0): - if whence is SEEK_SET: - start_index = 0 - elif whence is SEEK_CUR: - start_index = self._position - elif whence is SEEK_END: - start_index = self._length - offset = -offset - else: - raise ValueError("Invalid argument for the 'whence' parameter.") - - pos = start_index + offset - - if pos > self._length: - pos = self._length - elif pos < 0: - pos = 0 - - # check if buffer is still valid - # if not, drop buffer - if pos < self._current_buffer_start or pos >= self._current_buffer_start + self._current_buffer_size: - self._buffer.close() - self._buffer = BytesIO() - else: # if yes seek to correct position - delta = pos - self._current_buffer_start - self._buffer.seek(delta, SEEK_SET) - - self._position = pos - return pos - - def seekable(self): - return True - - def tell(self): - return self._position - - def write(self): - raise UnsupportedOperation - - def writelines(self): - raise UnsupportedOperation - - def writeable(self): - return False - - -class IterStreamer(object): - """ - File-like streaming iterator. - """ - - def __init__(self, generator, encoding="UTF-8"): - self.generator = generator - self.iterator = iter(generator) - self.leftover = b"" - self.encoding = encoding - - def __len__(self): - return self.generator.__len__() - - def __iter__(self): - return self.iterator - - def seekable(self): - return False - - def __next__(self): - return next(self.iterator) - - def tell(self, *args, **kwargs): - raise UnsupportedOperation("Data generator does not support tell.") - - def seek(self, *args, **kwargs): - raise UnsupportedOperation("Data generator is not seekable.") - - def read(self, size): - data = self.leftover - count = len(self.leftover) - try: - while count < size: - chunk = self.__next__() - if isinstance(chunk, str): - chunk = chunk.encode(self.encoding) - data += chunk - count += len(chunk) - # This means count < size and what's leftover will be returned in this call. - except StopIteration: - self.leftover = b"" - - if count >= size: - self.leftover = data[size:] - - return data[:size] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py deleted file mode 100644 index d7e2a496f193..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py +++ /dev/null @@ -1,460 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -import asyncio -import inspect -import threading -from asyncio import Lock -from io import UnsupportedOperation -from itertools import islice -from math import ceil -from typing import AsyncGenerator, Union - -from .import encode_base64, url_quote -from .request_handlers import get_length -from .response_handlers import return_response_headers -from .uploads import SubStream, IterStreamer # pylint: disable=unused-import - - -async def _async_parallel_uploads(uploader, pending, running): - range_ids = [] - while True: - # Wait for some download to finish before adding a new one - done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) - range_ids.extend([chunk.result() for chunk in done]) - try: - for _ in range(0, len(done)): - next_chunk = await pending.__anext__() - running.add(asyncio.ensure_future(uploader(next_chunk))) - except StopAsyncIteration: - break - - # Wait for the remaining uploads to finish - if running: - done, _running = await asyncio.wait(running) - range_ids.extend([chunk.result() for chunk in done]) - return range_ids - - -async def _parallel_uploads(uploader, pending, running): - range_ids = [] - while True: - # Wait for some download to finish before adding a new one - done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) - range_ids.extend([chunk.result() for chunk in done]) - try: - for _ in range(0, len(done)): - next_chunk = next(pending) - running.add(asyncio.ensure_future(uploader(next_chunk))) - except StopIteration: - break - - # Wait for the remaining uploads to finish - if running: - done, _running = await asyncio.wait(running) - range_ids.extend([chunk.result() for chunk in done]) - return range_ids - - -async def upload_data_chunks( - service=None, - uploader_class=None, - total_size=None, - chunk_size=None, - max_concurrency=None, - stream=None, - progress_hook=None, - **kwargs): - - parallel = max_concurrency > 1 - if parallel and 'modified_access_conditions' in kwargs: - # Access conditions do not work with parallelism - kwargs['modified_access_conditions'] = None - - uploader = uploader_class( - service=service, - total_size=total_size, - chunk_size=chunk_size, - stream=stream, - parallel=parallel, - progress_hook=progress_hook, - **kwargs) - - if parallel: - upload_tasks = uploader.get_chunk_streams() - running_futures = [] - for _ in range(max_concurrency): - try: - chunk = await upload_tasks.__anext__() - running_futures.append(asyncio.ensure_future(uploader.process_chunk(chunk))) - except StopAsyncIteration: - break - - range_ids = await _async_parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) - else: - range_ids = [] - async for chunk in uploader.get_chunk_streams(): - range_ids.append(await uploader.process_chunk(chunk)) - - if any(range_ids): - return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] - return uploader.response_headers - - -async def upload_substream_blocks( - service=None, - uploader_class=None, - total_size=None, - chunk_size=None, - max_concurrency=None, - stream=None, - progress_hook=None, - **kwargs): - parallel = max_concurrency > 1 - if parallel and 'modified_access_conditions' in kwargs: - # Access conditions do not work with parallelism - kwargs['modified_access_conditions'] = None - uploader = uploader_class( - service=service, - total_size=total_size, - chunk_size=chunk_size, - stream=stream, - parallel=parallel, - progress_hook=progress_hook, - **kwargs) - - if 
parallel: - upload_tasks = uploader.get_substream_blocks() - running_futures = [ - asyncio.ensure_future(uploader.process_substream_block(u)) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures) - else: - range_ids = [] - for block in uploader.get_substream_blocks(): - range_ids.append(await uploader.process_substream_block(block)) - if any(range_ids): - return sorted(range_ids) - return - - -class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - - def __init__( - self, service, - total_size, - chunk_size, - stream, - parallel, - encryptor=None, - padder=None, - progress_hook=None, - **kwargs): - self.service = service - self.total_size = total_size - self.chunk_size = chunk_size - self.stream = stream - self.parallel = parallel - - # Stream management - self.stream_lock = threading.Lock() if parallel else None - - # Progress feedback - self.progress_total = 0 - self.progress_lock = Lock() if parallel else None - self.progress_hook = progress_hook - - # Encryption - self.encryptor = encryptor - self.padder = padder - self.response_headers = None - self.etag = None - self.last_modified = None - self.request_options = kwargs - - async def get_chunk_streams(self): - index = 0 - while True: - data = b'' - read_size = self.chunk_size - - # Buffer until we either reach the end of the stream or get a whole chunk. - while True: - if self.total_size: - read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) - temp = self.stream.read(read_size) - if inspect.isawaitable(temp): - temp = await temp - if not isinstance(temp, bytes): - raise TypeError('Blob data should be of type bytes.') - data += temp or b"" - - # We have read an empty string and so are at the end - # of the buffer or we have read a full chunk. 
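Both `_async_parallel_uploads` and `upload_data_chunks` above implement the same bounded-concurrency refill loop: wait for the first completion, harvest results, then start one replacement per finished task. A toy, runnable reduction of that pattern (the `work` coroutine and the sizes are made up):

```python
import asyncio

async def work(i):
    await asyncio.sleep(0.01)
    return i * 2

async def bounded(items, width=2):
    pending = iter(items)
    # prime the window with `width` in-flight tasks
    running = {asyncio.ensure_future(work(next(pending))) for _ in range(width)}
    results = []
    while running:
        done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
        results.extend(t.result() for t in done)
        for _ in done:  # refill one slot per completed task
            try:
                running.add(asyncio.ensure_future(work(next(pending))))
            except StopIteration:
                break
    return results

assert sorted(asyncio.run(bounded(range(5)))) == [0, 2, 4, 6, 8]
```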
- if temp == b'' or len(data) == self.chunk_size: - break - - if len(data) == self.chunk_size: - if self.padder: - data = self.padder.update(data) - if self.encryptor: - data = self.encryptor.update(data) - yield index, data - else: - if self.padder: - data = self.padder.update(data) + self.padder.finalize() - if self.encryptor: - data = self.encryptor.update(data) + self.encryptor.finalize() - if data: - yield index, data - break - index += len(data) - - async def process_chunk(self, chunk_data): - chunk_bytes = chunk_data[1] - chunk_offset = chunk_data[0] - return await self._upload_chunk_with_progress(chunk_offset, chunk_bytes) - - async def _update_progress(self, length): - if self.progress_lock is not None: - async with self.progress_lock: - self.progress_total += length - else: - self.progress_total += length - - if self.progress_hook: - await self.progress_hook(self.progress_total, self.total_size) - - async def _upload_chunk(self, chunk_offset, chunk_data): - raise NotImplementedError("Must be implemented by child class.") - - async def _upload_chunk_with_progress(self, chunk_offset, chunk_data): - range_id = await self._upload_chunk(chunk_offset, chunk_data) - await self._update_progress(len(chunk_data)) - return range_id - - def get_substream_blocks(self): - assert self.chunk_size is not None - lock = self.stream_lock - blob_length = self.total_size - - if blob_length is None: - blob_length = get_length(self.stream) - if blob_length is None: - raise ValueError("Unable to determine content length of upload data.") - - blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) - last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size - - for i in range(blocks): - index = i * self.chunk_size - length = last_block_size if i == blocks - 1 else self.chunk_size - yield index, SubStream(self.stream, index, length, lock) - - async def process_substream_block(self, block_data): - return await self._upload_substream_block_with_progress(block_data[0], block_data[1]) - - async def _upload_substream_block(self, index, block_stream): - raise NotImplementedError("Must be implemented by child class.") - - async def _upload_substream_block_with_progress(self, index, block_stream): - range_id = await self._upload_substream_block(index, block_stream) - await self._update_progress(len(block_stream)) - return range_id - - def set_response_properties(self, resp): - self.etag = resp.etag - self.last_modified = resp.last_modified - - -class BlockBlobChunkUploader(_ChunkUploader): - - def __init__(self, *args, **kwargs): - kwargs.pop('modified_access_conditions', None) - super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) - self.current_length = None - - async def _upload_chunk(self, chunk_offset, chunk_data): - # TODO: This is incorrect, but works with recording. 
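The block-id scheme flagged by that TODO pads the chunk offset to 32 digits before double-encoding it, which keeps the ids a constant length so the service's lexicographic ordering of uncommitted blocks matches the numeric chunk order. Sketched with the stdlib (the local `encode_base64` mirrors this package's helper as we understand it):

```python
import base64
from urllib.parse import quote

def encode_base64(data):
    if isinstance(data, str):
        data = data.encode("utf-8")
    return base64.b64encode(data).decode("utf-8")

offset = 4 * 1024 * 1024            # second 4MiB chunk
index = f"{offset:032d}"            # '00000000000000000000000004194304'
block_id = encode_base64(quote(encode_base64(index)))
print(index, block_id)
```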
- index = f'{chunk_offset:032d}' - block_id = encode_base64(url_quote(encode_base64(index))) - await self.service.stage_block( - block_id, - len(chunk_data), - body=chunk_data, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options) - return index, block_id - - async def _upload_substream_block(self, index, block_stream): - try: - block_id = f'BlockId{(index//self.chunk_size):05}' - await self.service.stage_block( - block_id, - len(block_stream), - block_stream, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options) - finally: - block_stream.close() - return block_id - - -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def _is_chunk_empty(self, chunk_data): - # read until non-zero byte is encountered - # if reached the end without returning, then chunk_data is all 0's - for each_byte in chunk_data: - if each_byte not in [0, b'\x00']: - return False - return True - - async def _upload_chunk(self, chunk_offset, chunk_data): - # avoid uploading the empty pages - if not self._is_chunk_empty(chunk_data): - chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = f'bytes={chunk_offset}-{chunk_end}' - computed_md5 = None - self.response_headers = await self.service.upload_pages( - body=chunk_data, - content_length=len(chunk_data), - transactional_content_md5=computed_md5, - range=content_range, - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options) - - if not self.parallel and self.request_options.get('modified_access_conditions'): - self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] - - async def _upload_substream_block(self, index, block_stream): - pass - - -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - def __init__(self, *args, **kwargs): - super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) - self.current_length = None - - async def _upload_chunk(self, chunk_offset, chunk_data): - if self.current_length is None: - self.response_headers = await self.service.append_block( - body=chunk_data, - content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options) - self.current_length = int(self.response_headers['blob_append_offset']) - else: - self.request_options['append_position_access_conditions'].append_position = \ - self.current_length + chunk_offset - self.response_headers = await self.service.append_block( - body=chunk_data, - content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options) - - async def _upload_substream_block(self, index, block_stream): - pass - - -class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - async def _upload_chunk(self, chunk_offset, chunk_data): - self.response_headers = await self.service.append_data( - body=chunk_data, - position=chunk_offset, - content_length=len(chunk_data), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - - if not self.parallel and self.request_options.get('modified_access_conditions'): - self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] - - async 
def _upload_substream_block(self, index, block_stream): - try: - await self.service.append_data( - body=block_stream, - position=index, - content_length=len(block_stream), - cls=return_response_headers, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - finally: - block_stream.close() - - -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method - - async def _upload_chunk(self, chunk_offset, chunk_data): - length = len(chunk_data) - chunk_end = chunk_offset + length - 1 - response = await self.service.upload_range( - chunk_data, - chunk_offset, - length, - data_stream_total=self.total_size, - upload_stream_current=self.progress_total, - **self.request_options - ) - range_id = f'bytes={chunk_offset}-{chunk_end}' - return range_id, response - - # TODO: Implement this method. - async def _upload_substream_block(self, index, block_stream): - pass - - -class AsyncIterStreamer(): - """ - File-like streaming object for AsyncGenerators. - """ - def __init__(self, generator: AsyncGenerator[Union[bytes, str], None], encoding: str = "UTF-8"): - self.iterator = generator.__aiter__() - self.leftover = b"" - self.encoding = encoding - - def seekable(self): - return False - - def tell(self, *args, **kwargs): - raise UnsupportedOperation("Data generator does not support tell.") - - def seek(self, *args, **kwargs): - raise UnsupportedOperation("Data generator is not seekable.") - - async def read(self, size: int) -> bytes: - data = self.leftover - count = len(self.leftover) - try: - while count < size: - chunk = await self.iterator.__anext__() - if isinstance(chunk, str): - chunk = chunk.encode(self.encoding) - data += chunk - count += len(chunk) - # This means count < size and what's leftover will be returned in this call. - except StopAsyncIteration: - self.leftover = b"" - - if count >= size: - self.leftover = data[size:] - - return data[:size] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py deleted file mode 100644 index bd33559d180b..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py +++ /dev/null @@ -1,700 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -from typing import ( - Any, Callable, Optional, Union, - TYPE_CHECKING -) -from urllib.parse import parse_qs - -from ._shared import sign_string, url_quote -from ._shared.constants import X_MS_VERSION -from ._shared.models import Services, UserDelegationKey -from ._shared.shared_access_signature import QueryStringConstants, SharedAccessSignature, _SharedAccessHelper - -if TYPE_CHECKING: - from datetime import datetime - from ..blob import AccountSasPermissions, BlobSasPermissions, ContainerSasPermissions, ResourceTypes - - -class BlobQueryStringConstants(object): - SIGNED_TIMESTAMP = 'snapshot' - - -class BlobSharedAccessSignature(SharedAccessSignature): - ''' - Provides a factory for creating blob and container access - signature tokens with a common account name and account key. 
Users can either - use the factory or can construct the appropriate service and use the - generate_*_shared_access_signature method directly. - ''' - - def __init__( - self, account_name: str, - account_key: Optional[str] = None, - user_delegation_key: Optional[UserDelegationKey] = None - ) -> None: - ''' - :param str account_name: - The storage account name used to generate the shared access signatures. - :param Optional[str] account_key: - The access key to generate the shared access signatures. - :param Optional[~azure.storage.blob.models.UserDelegationKey] user_delegation_key: - Instead of an account key, the user could pass in a user delegation key. - A user delegation key can be obtained from the service by authenticating with an AAD identity; - this can be accomplished by calling get_user_delegation_key on any Blob service object. - ''' - super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) - self.user_delegation_key = user_delegation_key - - def generate_blob( - self, container_name: str, - blob_name: str, - snapshot: Optional[str] = None, - version_id: Optional[str] = None, - permission: Optional[Union["BlobSasPermissions", str]] = None, - expiry: Optional[Union["datetime", str]] = None, - start: Optional[Union["datetime", str]] = None, - policy_id: Optional[str] = None, - ip: Optional[str] = None, - protocol: Optional[str] = None, - cache_control: Optional[str] = None, - content_disposition: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_type: Optional[str] = None, - sts_hook: Optional[Callable[[str], None]] = None, - **kwargs: Any - ) -> str: - ''' - Generates a shared access signature for the blob or one of its snapshots. - Use the returned signature with the sas_token parameter of any BlobService. - - :param str container_name: - Name of container. - :param str blob_name: - Name of blob. - :param str snapshot: - The snapshot parameter is an opaque datetime value that, - when present, specifies the blob snapshot to grant permission. - :param str version_id: - An optional blob version ID. This parameter is only applicable for versioning-enabled - Storage accounts. Note that the 'versionid' query parameter is not included in the output - SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output - SAS to operate on a specific version. - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Permissions must be ordered racwdxytmei. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or BlobSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. The provided datetime will always - be interpreted as UTC.
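A hypothetical invocation of `generate_blob` under the parameters documented so far (account name and key are placeholders; the SDK's public wrapper for this path is `generate_blob_sas`):

```python
from datetime import datetime, timedelta, timezone

sas = BlobSharedAccessSignature("myaccount", account_key="bXlrZXk=")  # fake key
token = sas.generate_blob(
    container_name="logs",
    blob_name="2025/06/01/app.log",
    permission="r",  # read-only
    expiry=datetime.now(timezone.utc) + timedelta(minutes=30),
)
# append `token` to the blob URL as its query string to authorize reads
```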
- :type start: datetime or str - :param str policy_id: - A unique value up to 64 characters in length that correlates to a - stored access policy. To create a stored access policy, use - set_blob_service_properties. - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :param str protocol: - Specifies the protocol permitted for a request made. The default value - is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. - :param str cache_control: - Response header value for Cache-Control when resource is accessed - using this shared access signature. - :param str content_disposition: - Response header value for Content-Disposition when resource is accessed - using this shared access signature. - :param str content_encoding: - Response header value for Content-Encoding when resource is accessed - using this shared access signature. - :param str content_language: - Response header value for Content-Language when resource is accessed - using this shared access signature. - :param str content_type: - Response header value for Content-Type when resource is accessed - using this shared access signature. - :param sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :type sts_hook: Optional[Callable[[str], None]] - :return: A Shared Access Signature (sas) token. - :rtype: str - ''' - resource_path = container_name + '/' + blob_name - - sas = _BlobSharedAccessHelper() - sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) - sas.add_id(policy_id) - - resource = 'bs' if snapshot else 'b' - resource = 'bv' if version_id else resource - resource = 'd' if kwargs.pop("is_directory", None) else resource - sas.add_resource(resource) - - sas.add_timestamp(snapshot or version_id) - sas.add_override_response_headers(cache_control, content_disposition, - content_encoding, content_language, - content_type) - sas.add_encryption_scope(**kwargs) - sas.add_info_for_hns_account(**kwargs) - sas.add_resource_signature(self.account_name, self.account_key, resource_path, - user_delegation_key=self.user_delegation_key) - - if sts_hook is not None: - sts_hook(sas.string_to_sign) - - return sas.get_token() - - def generate_container( - self, container_name: str, - permission: Optional[Union["ContainerSasPermissions", str]] = None, - expiry: Optional[Union["datetime", str]] = None, - start: Optional[Union["datetime", str]] = None, - policy_id: Optional[str] = None, - ip: Optional[str] = None, - protocol: Optional[str] = None, - cache_control: Optional[str] = None, - content_disposition: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_type: Optional[str] = None, - sts_hook: Optional[Callable[[str], None]] = None, - **kwargs: Any - ) -> str: - ''' - Generates a shared access signature for the container. - Use the returned signature with the sas_token parameter of any BlobService. - - :param str container_name: - Name of container. - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. 
- Permissions must be ordered racwdxyltfmei. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or ContainerSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. The provided datetime will always - be interpreted as UTC. - :type start: datetime or str - :param str policy_id: - A unique value up to 64 characters in length that correlates to a - stored access policy. To create a stored access policy, use - set_blob_service_properties. - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :param str protocol: - Specifies the protocol permitted for a request made. The default value - is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. - :param str cache_control: - Response header value for Cache-Control when resource is accessed - using this shared access signature. - :param str content_disposition: - Response header value for Content-Disposition when resource is accessed - using this shared access signature. - :param str content_encoding: - Response header value for Content-Encoding when resource is accessed - using this shared access signature. - :param str content_language: - Response header value for Content-Language when resource is accessed - using this shared access signature. - :param str content_type: - Response header value for Content-Type when resource is accessed - using this shared access signature. - :param sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :type sts_hook: Optional[Callable[[str], None]] - :return: A Shared Access Signature (sas) token. 
- :rtype: str - ''' - sas = _BlobSharedAccessHelper() - sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) - sas.add_id(policy_id) - sas.add_resource('c') - sas.add_override_response_headers(cache_control, content_disposition, - content_encoding, content_language, - content_type) - sas.add_encryption_scope(**kwargs) - sas.add_info_for_hns_account(**kwargs) - sas.add_resource_signature(self.account_name, self.account_key, container_name, - user_delegation_key=self.user_delegation_key) - - if sts_hook is not None: - sts_hook(sas.string_to_sign) - - return sas.get_token() - - -class _BlobSharedAccessHelper(_SharedAccessHelper): - - def add_timestamp(self, timestamp): - self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp) - - def add_info_for_hns_account(self, **kwargs): - self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None)) - self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None)) - self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None)) - self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None)) - - def get_value_to_append(self, query): - return_value = self.query_dict.get(query) or '' - return return_value + '\n' - - def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): - # pylint: disable = no-member - if path[0] != '/': - path = '/' + path - - canonicalized_resource = '/blob/' + account_name + path + '\n' - - # Form the string to sign from shared_access_policy and canonicalized - # resource. The order of values is important. - string_to_sign = \ - (self.get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + - self.get_value_to_append(QueryStringConstants.SIGNED_START) + - self.get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + - canonicalized_resource) - - if user_delegation_key is not None: - self._add_query(QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid) - self._add_query(QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid) - self._add_query(QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start) - self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) - self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) - self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) - - string_to_sign += \ - (self.get_value_to_append(QueryStringConstants.SIGNED_OID) + - self.get_value_to_append(QueryStringConstants.SIGNED_TID) + - self.get_value_to_append(QueryStringConstants.SIGNED_KEY_START) + - self.get_value_to_append(QueryStringConstants.SIGNED_KEY_EXPIRY) + - self.get_value_to_append(QueryStringConstants.SIGNED_KEY_SERVICE) + - self.get_value_to_append(QueryStringConstants.SIGNED_KEY_VERSION) + - self.get_value_to_append(QueryStringConstants.SIGNED_AUTHORIZED_OID) + - self.get_value_to_append(QueryStringConstants.SIGNED_UNAUTHORIZED_OID) + - self.get_value_to_append(QueryStringConstants.SIGNED_CORRELATION_ID)) - else: - string_to_sign += self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) - - string_to_sign += \ - (self.get_value_to_append(QueryStringConstants.SIGNED_IP) + - self.get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + - self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + - self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + - 
self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + - self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + - self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + - self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + - self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + - self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) + - self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE)) - - # remove the trailing newline - if string_to_sign[-1] == '\n': - string_to_sign = string_to_sign[:-1] - - self._add_query(QueryStringConstants.SIGNED_SIGNATURE, - sign_string(account_key if user_delegation_key is None else user_delegation_key.value, - string_to_sign)) - self.string_to_sign = string_to_sign - - def get_token(self) -> str: - # a conscious decision was made to exclude the timestamp in the generated token - # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp - exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] - return '&'.join([f'{n}={url_quote(v)}' - for n, v in self.query_dict.items() if v is not None and n not in exclude]) - - -def generate_account_sas( - account_name: str, - account_key: str, - resource_types: Union["ResourceTypes", str], - permission: Union["AccountSasPermissions", str], - expiry: Union["datetime", str], - start: Optional[Union["datetime", str]] = None, - ip: Optional[str] = None, - *, - services: Union[Services, str] = Services(blob=True), - sts_hook: Optional[Callable[[str], None]] = None, - **kwargs: Any -) -> str: - """Generates a shared access signature for the blob service. - - Use the returned signature with the credential parameter of any BlobServiceClient, - ContainerClient or BlobClient. - - :param str account_name: - The storage account name used to generate the shared access signature. - :param str account_key: - The account key, also called shared key or access key, to generate the shared access signature. - :param resource_types: - Specifies the resource types that are accessible with the account SAS. - :type resource_types: str or ~azure.storage.blob.ResourceTypes - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - :type permission: str or ~azure.storage.blob.AccountSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - The provided datetime will always be interpreted as UTC. - :type expiry: ~datetime.datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. The provided datetime will always - be interpreted as UTC. - :type start: ~datetime.datetime or str - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :keyword Union[Services, str] services: - Specifies the services that the Shared Access Signature (sas) token will be able to be utilized with. - Will default to only this package (i.e. blobs) if not provided. 
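# Illustrative only: a minimal sketch of driving the generate_account_sas documented
# here. The account name and key are placeholders, and the one-hour expiry is an
# arbitrary choice for the example, not a library default.
from datetime import datetime, timedelta, timezone
from azure.storage.blob import (
    AccountSasPermissions,
    BlobServiceClient,
    ResourceTypes,
    generate_account_sas,
)

sas_token = generate_account_sas(
    account_name="mystorageaccount",            # placeholder
    account_key="<account-key>",                # placeholder
    resource_types=ResourceTypes(service=True, container=True, object=True),
    permission=AccountSasPermissions(read=True, list=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
# The token can then be passed as the credential of any service, container, or blob client.
service = BlobServiceClient("https://mystorageaccount.blob.core.windows.net", credential=sas_token)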
- :keyword str protocol: - Specifies the protocol permitted for a request made. The default value is https. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. - :keyword sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :paramtype sts_hook: Optional[Callable[[str], None]] - :return: A Shared Access Signature (sas) token. - :rtype: str - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_authentication.py - :start-after: [START create_sas_token] - :end-before: [END create_sas_token] - :language: python - :dedent: 8 - :caption: Generating a shared access signature. - """ - sas = SharedAccessSignature(account_name, account_key) - return sas.generate_account( - services=services, - resource_types=resource_types, - permission=permission, - expiry=expiry, - start=start, - ip=ip, - sts_hook=sts_hook, - **kwargs - ) - - -def generate_container_sas( - account_name: str, - container_name: str, - account_key: Optional[str] = None, - user_delegation_key: Optional[UserDelegationKey] = None, - permission: Optional[Union["ContainerSasPermissions", str]] = None, - expiry: Optional[Union["datetime", str]] = None, - start: Optional[Union["datetime", str]] = None, - policy_id: Optional[str] = None, - ip: Optional[str] = None, - *, - sts_hook: Optional[Callable[[str], None]] = None, - **kwargs: Any -) -> str: - """Generates a shared access signature for a container. - - Use the returned signature with the credential parameter of any BlobServiceClient, - ContainerClient or BlobClient. - - :param str account_name: - The storage account name used to generate the shared access signature. - :param str container_name: - The name of the container. - :param str account_key: - The account key, also called shared key or access key, to generate the shared access signature. - Either `account_key` or `user_delegation_key` must be specified. - :param ~azure.storage.blob.UserDelegationKey user_delegation_key: - Instead of an account shared key, the user could pass in a user delegation key. - A user delegation key can be obtained from the service by authenticating with an AAD identity; - this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. - When present, the SAS is signed with the user delegation key instead. - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Permissions must be ordered racwdxyltfmei. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or ~azure.storage.blob.ContainerSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: ~datetime.datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. 
The provided datetime will always - be interpreted as UTC. - :type start: ~datetime.datetime or str - :param str policy_id: - A unique value up to 64 characters in length that correlates to a - stored access policy. To create a stored access policy, use - :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :keyword str protocol: - Specifies the protocol permitted for a request made. The default value is https. - :keyword str cache_control: - Response header value for Cache-Control when resource is accessed - using this shared access signature. - :keyword str content_disposition: - Response header value for Content-Disposition when resource is accessed - using this shared access signature. - :keyword str content_encoding: - Response header value for Content-Encoding when resource is accessed - using this shared access signature. - :keyword str content_language: - Response header value for Content-Language when resource is accessed - using this shared access signature. - :keyword str content_type: - Response header value for Content-Type when resource is accessed - using this shared access signature. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. - :keyword str correlation_id: - The correlation id to correlate the storage audit logs with the audit logs used by the principal - generating and distributing the SAS. This can only be used when generating a SAS with delegation key. - :keyword sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :paramtype sts_hook: Optional[Callable[[str], None]] - :return: A Shared Access Signature (sas) token. - :rtype: str - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers.py - :start-after: [START generate_sas_token] - :end-before: [END generate_sas_token] - :language: python - :dedent: 12 - :caption: Generating a sas token. 
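# A sketch of the user-delegation-key flow this docstring describes, assuming an AAD
# credential from azure-identity; "mystorageaccount" and "mycontainer" are placeholders.
from datetime import datetime, timedelta, timezone
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient, ContainerSasPermissions, generate_container_sas

service = BlobServiceClient(
    "https://mystorageaccount.blob.core.windows.net", credential=DefaultAzureCredential()
)
start = datetime.now(timezone.utc)
delegation_key = service.get_user_delegation_key(
    key_start_time=start, key_expiry_time=start + timedelta(hours=1)
)
container_sas = generate_container_sas(
    account_name="mystorageaccount",
    container_name="mycontainer",
    user_delegation_key=delegation_key,   # signs with the delegation key instead of an account key
    permission=ContainerSasPermissions(read=True, list=True),
    expiry=start + timedelta(hours=1),
)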
- """ - if not policy_id: - if not expiry: - raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") - if not permission: - raise ValueError("'permission' parameter must be provided when not using a stored access policy.") - if not user_delegation_key and not account_key: - raise ValueError("Either user_delegation_key or account_key must be provided.") - if isinstance(account_key, UserDelegationKey): - user_delegation_key = account_key - if user_delegation_key: - sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) - else: - sas = BlobSharedAccessSignature(account_name, account_key=account_key) - return sas.generate_container( - container_name, - permission=permission, - expiry=expiry, - start=start, - policy_id=policy_id, - ip=ip, - sts_hook=sts_hook, - **kwargs - ) - - -def generate_blob_sas( - account_name: str, - container_name: str, - blob_name: str, - snapshot: Optional[str] = None, - account_key: Optional[str] = None, - user_delegation_key: Optional[UserDelegationKey] = None, - permission: Optional[Union["BlobSasPermissions", str]] = None, - expiry: Optional[Union["datetime", str]] = None, - start: Optional[Union["datetime", str]] = None, - policy_id: Optional[str] = None, - ip: Optional[str] = None, - *, - sts_hook: Optional[Callable[[str], None]] = None, - **kwargs: Any -) -> str: - """Generates a shared access signature for a blob. - - Use the returned signature with the credential parameter of any BlobServiceClient, - ContainerClient or BlobClient. - - :param str account_name: - The storage account name used to generate the shared access signature. - :param str container_name: - The name of the container. - :param str blob_name: - The name of the blob. - :param str snapshot: - An optional blob snapshot ID. - :param str account_key: - The account key, also called shared key or access key, to generate the shared access signature. - Either `account_key` or `user_delegation_key` must be specified. - :param ~azure.storage.blob.UserDelegationKey user_delegation_key: - Instead of an account shared key, the user could pass in a user delegation key. - A user delegation key can be obtained from the service by authenticating with an AAD identity; - this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. - When present, the SAS is signed with the user delegation key instead. - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Permissions must be ordered racwdxytmei. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or ~azure.storage.blob.BlobSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: ~datetime.datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. 
The provided datetime will always - be interpreted as UTC. - :type start: ~datetime.datetime or str - :param str policy_id: - A unique value up to 64 characters in length that correlates to a - stored access policy. To create a stored access policy, use - :func:`~azure.storage.blob.ContainerClient.set_container_access_policy()`. - :param str ip: - Specifies an IP address or a range of IP addresses from which to accept requests. - If the IP address from which the request originates does not match the IP address - or address range specified on the SAS token, the request is not authenticated. - For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS - restricts the request to those IP addresses. - :keyword str version_id: - An optional blob version ID. This parameter is only applicable for versioning-enabled - Storage accounts. Note that the 'versionid' query parameter is not included in the output - SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output - SAS to operate on a specific version. - - .. versionadded:: 12.4.0 - This keyword argument was introduced in API version '2019-12-12'. - :keyword str protocol: - Specifies the protocol permitted for a request made. The default value is https. - :keyword str cache_control: - Response header value for Cache-Control when resource is accessed - using this shared access signature. - :keyword str content_disposition: - Response header value for Content-Disposition when resource is accessed - using this shared access signature. - :keyword str content_encoding: - Response header value for Content-Encoding when resource is accessed - using this shared access signature. - :keyword str content_language: - Response header value for Content-Language when resource is accessed - using this shared access signature. - :keyword str content_type: - Response header value for Content-Type when resource is accessed - using this shared access signature. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. - :keyword str correlation_id: - The correlation id to correlate the storage audit logs with the audit logs used by the principal - generating and distributing the SAS. This can only be used when generating a SAS with delegation key. - :keyword sts_hook: - For debugging purposes only. If provided, the hook is called with the string to sign - that was used to generate the SAS. - :paramtype sts_hook: Optional[Callable[[str], None]] - :return: A Shared Access Signature (sas) token. 
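# Sketch of the account-key path for the generate_blob_sas documented here; the
# account, container, blob names and the 30-minute expiry are all placeholders.
from datetime import datetime, timedelta, timezone
from azure.storage.blob import BlobClient, BlobSasPermissions, generate_blob_sas

sas = generate_blob_sas(
    account_name="mystorageaccount",
    container_name="mycontainer",
    blob_name="reports/data.csv",
    account_key="<account-key>",
    permission=BlobSasPermissions(read=True),
    expiry=datetime.now(timezone.utc) + timedelta(minutes=30),
)
url = f"https://mystorageaccount.blob.core.windows.net/mycontainer/reports/data.csv?{sas}"
reader = BlobClient.from_blob_url(url)   # no separate credential needed; the SAS rides in the URL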
- :rtype: str - """ - if not policy_id: - if not expiry: - raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") - if not permission: - raise ValueError("'permission' parameter must be provided when not using a stored access policy.") - if not user_delegation_key and not account_key: - raise ValueError("Either user_delegation_key or account_key must be provided.") - if isinstance(account_key, UserDelegationKey): - user_delegation_key = account_key - version_id = kwargs.pop('version_id', None) - if version_id and snapshot: - raise ValueError("snapshot and version_id cannot be set at the same time.") - if user_delegation_key: - sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) - else: - sas = BlobSharedAccessSignature(account_name, account_key=account_key) - return sas.generate_blob( - container_name, - blob_name, - snapshot=snapshot, - version_id=version_id, - permission=permission, - expiry=expiry, - start=start, - policy_id=policy_id, - ip=ip, - sts_hook=sts_hook, - **kwargs - ) - -def _is_credential_sastoken(credential: Any) -> bool: - if not credential or not isinstance(credential, str): - return False - - sas_values = QueryStringConstants.to_list() - parsed_query = parse_qs(credential.lstrip("?")) - if parsed_query and all(k in sas_values for k in parsed_query): - return True - return False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_upload_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_upload_helpers.py deleted file mode 100644 index 2ce55f7ab237..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_upload_helpers.py +++ /dev/null @@ -1,354 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -from io import SEEK_SET, UnsupportedOperation -from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING - -from azure.core.exceptions import ResourceExistsError, ResourceModifiedError, HttpResponseError - -from ._encryption import ( - _ENCRYPTION_PROTOCOL_V1, - _ENCRYPTION_PROTOCOL_V2, - encrypt_blob, - GCMBlobEncryptionStream, - generate_blob_encryption_data, - get_adjusted_upload_size, - get_blob_encryptor_and_padder -) -from ._generated.models import ( - AppendPositionAccessConditions, - BlockLookupList, - ModifiedAccessConditions -) -from ._shared.models import StorageErrorCode -from ._shared.response_handlers import process_storage_error, return_response_headers -from ._shared.uploads import ( - AppendBlobChunkUploader, - BlockBlobChunkUploader, - PageBlobChunkUploader, - upload_data_chunks, - upload_substream_blocks -) - -if TYPE_CHECKING: - from ._generated.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations - from ._shared.models import StorageConfiguration - BlobLeaseClient = TypeVar("BlobLeaseClient") - -_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 -_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' 
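# The SAS-detection rule used by _is_credential_sastoken above, restated as a
# standalone sketch: a credential string counts as a SAS token only when every
# query key is a known SAS parameter. The key set here is a hand-picked subset
# for illustration; the real code takes it from QueryStringConstants.to_list().
from urllib.parse import parse_qs

KNOWN_SAS_KEYS = {"sv", "ss", "srt", "sp", "se", "st", "spr", "sig", "sr", "si"}

def looks_like_sas(credential: str) -> bool:
    parsed = parse_qs(credential.lstrip("?"))
    return bool(parsed) and all(key in KNOWN_SAS_KEYS for key in parsed)

assert looks_like_sas("?sv=2025-01-05&sp=r&sig=abc%3D")
assert not looks_like_sas("an-account-key-not-a-sas==")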
- - -def _convert_mod_error(error): - message = error.message.replace( - "The condition specified using HTTP conditional header(s) is not met.", - "The specified blob already exists.") - message = message.replace("ConditionNotMet", "BlobAlreadyExists") - overwrite_error = ResourceExistsError( - message=message, - response=error.response, - error=error) - overwrite_error.error_code = StorageErrorCode.blob_already_exists - raise overwrite_error - - -def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument - return any([ - modified_access_conditions.if_modified_since, - modified_access_conditions.if_unmodified_since, - modified_access_conditions.if_none_match, - modified_access_conditions.if_match - ]) - - -def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements - client: "BlockBlobOperations", - stream: IO, - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - validate_content: bool, - max_concurrency: Optional[int], - length: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if not overwrite and not _any_conditions(**kwargs): - kwargs['modified_access_conditions'].if_none_match = '*' - adjusted_count = length - if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) - blob_headers = kwargs.pop('blob_headers', None) - tier = kwargs.pop('standard_blob_tier', None) - blob_tags_string = kwargs.pop('blob_tags_string', None) - - immutability_policy = kwargs.pop('immutability_policy', None) - immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time - immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode - legal_hold = kwargs.pop('legal_hold', None) - progress_hook = kwargs.pop('progress_hook', None) - - # Do single put if the size is smaller than or equal config.max_single_put_size - if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): - data = stream.read(length or -1) - if not isinstance(data, bytes): - raise TypeError('Blob data should be of type bytes.') - - if encryption_options.get('key'): - encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_data - - response = client.upload( - body=data, # type: ignore [arg-type] - content_length=adjusted_count, - blob_http_headers=blob_headers, - headers=headers, - cls=return_response_headers, - validate_content=validate_content, - data_stream_total=adjusted_count, - upload_stream_current=0, - tier=tier.value if tier else None, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - **kwargs) - - if progress_hook: - progress_hook(adjusted_count, adjusted_count) - - return cast(Dict[str, Any], response) - - use_original_upload_path = blob_settings.use_byte_buffer or \ - validate_content or encryption_options.get('required') or \ - blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ - hasattr(stream, 'seekable') and not stream.seekable() or \ - not hasattr(stream, 'seek') or not hasattr(stream, 'tell') - - if use_original_upload_path: - total_size = length - encryptor, padder = None, None - if encryption_options and 
encryption_options.get('key'): - cek, iv, encryption_metadata = generate_blob_encryption_data( - encryption_options['key'], - encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_metadata - - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: - encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) - - # Adjust total_size for encryption V2 - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: - # Adjust total_size for encryption V2 - total_size = adjusted_count - # V2 wraps the data stream with an encryption stream - if cek is None: - raise ValueError("Generate encryption metadata failed. 'cek' is None.") - stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] - - block_ids = upload_data_chunks( - service=client, - uploader_class=BlockBlobChunkUploader, - total_size=total_size, - chunk_size=blob_settings.max_block_size, - max_concurrency=max_concurrency, - stream=stream, - validate_content=validate_content, - progress_hook=progress_hook, - encryptor=encryptor, - padder=padder, - headers=headers, - **kwargs - ) - else: - block_ids = upload_substream_blocks( - service=client, - uploader_class=BlockBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - max_concurrency=max_concurrency, - stream=stream, - validate_content=validate_content, - progress_hook=progress_hook, - headers=headers, - **kwargs - ) - - block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) - block_lookup.latest = block_ids - return cast(Dict[str, Any], client.commit_block_list( - block_lookup, - blob_http_headers=blob_headers, - cls=return_response_headers, - validate_content=validate_content, - headers=headers, - tier=tier.value if tier else None, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - **kwargs)) - except HttpResponseError as error: - try: - process_storage_error(error) - except ResourceModifiedError as mod_error: - if not overwrite: - _convert_mod_error(mod_error) - raise - - -def upload_page_blob( - client: "PageBlobOperations", - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - stream: IO, - length: Optional[int] = None, - validate_content: Optional[bool] = None, - max_concurrency: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if not overwrite and not _any_conditions(**kwargs): - kwargs['modified_access_conditions'].if_none_match = '*' - if length is None or length < 0: - raise ValueError("A content length must be specified for a Page Blob.") - if length % 512 != 0: - raise ValueError(f"Invalid page blob size: {length}. 
" - "The size must be aligned to a 512-byte boundary.") - tier = None - if kwargs.get('premium_page_blob_tier'): - premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') - try: - tier = premium_page_blob_tier.value - except AttributeError: - tier = premium_page_blob_tier - - if encryption_options and encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data( - encryption_options['key'], - encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_data - - blob_tags_string = kwargs.pop('blob_tags_string', None) - progress_hook = kwargs.pop('progress_hook', None) - - response = cast(Dict[str, Any], client.create( - content_length=0, - blob_content_length=length, - blob_sequence_number=None, # type: ignore [arg-type] - blob_http_headers=kwargs.pop('blob_headers', None), - blob_tags_string=blob_tags_string, - tier=tier, - cls=return_response_headers, - headers=headers, - **kwargs)) - if length == 0: - return cast(Dict[str, Any], response) - - if encryption_options and encryption_options.get('key'): - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: - encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - - kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return cast(Dict[str, Any], upload_data_chunks( - service=client, - uploader_class=PageBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_page_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - - except HttpResponseError as error: - try: - process_storage_error(error) - except ResourceModifiedError as mod_error: - if not overwrite: - _convert_mod_error(mod_error) - raise - - -def upload_append_blob( # pylint: disable=unused-argument - client: "AppendBlobOperations", - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - stream: IO, - length: Optional[int] = None, - validate_content: Optional[bool] = None, - max_concurrency: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if length == 0: - return {} - blob_headers = kwargs.pop('blob_headers', None) - append_conditions = AppendPositionAccessConditions( - max_size=kwargs.pop('maxsize_condition', None), - append_position=None) - blob_tags_string = kwargs.pop('blob_tags_string', None) - progress_hook = kwargs.pop('progress_hook', None) - - try: - if overwrite: - client.create( - content_length=0, - blob_http_headers=blob_headers, - headers=headers, - blob_tags_string=blob_tags_string, - **kwargs) - return cast(Dict[str, Any], upload_data_chunks( - service=client, - uploader_class=AppendBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - append_position_access_conditions=append_conditions, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - except HttpResponseError as error: - if error.response.status_code != 404: # type: ignore [union-attr] - raise - # rewind the request body if it is a stream - if hasattr(stream, 'read'): - try: - # attempt to rewind the body to the initial position - stream.seek(0, SEEK_SET) - except UnsupportedOperation as exc: - # if body is not seekable, then retry would not work - raise error from exc - client.create( - 
content_length=0, - blob_http_headers=blob_headers, - headers=headers, - blob_tags_string=blob_tags_string, - **kwargs) - return cast(Dict[str, Any], upload_data_chunks( - service=client, - uploader_class=AppendBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - append_position_access_conditions=append_conditions, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - except HttpResponseError as error: - process_storage_error(error) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_vendor.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_vendor.py new file mode 100644 index 000000000000..cbaa624660e4 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_vendor.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional + +from azure.core import MatchConditions + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py index 9bdabb440225..be71c81bd282 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py @@ -1,7 +1,9 @@ -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
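# Quick check of the new _vendor.py helpers added above, assuming the module path
# this diff creates (azure.storage.blob._vendor); the etag value is made up.
from azure.core import MatchConditions
from azure.storage.blob._vendor import prep_if_match, prep_if_none_match

assert prep_if_match("0x8D97A7", MatchConditions.IfNotModified) == '"0x8D97A7"'    # etag gets quoted
assert prep_if_match(None, MatchConditions.IfPresent) == "*"                       # "must exist" check
assert prep_if_none_match("0x8D97A7", MatchConditions.IfModified) == '"0x8D97A7"'
assert prep_if_none_match(None, MatchConditions.IfMissing) == "*"                  # "must not exist" check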
# -------------------------------------------------------------------------- -VERSION = "12.23.0" +VERSION = "1.0.0b1" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py index a755e6a2d59b..03f028b0195f 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py @@ -1,166 +1,23 @@ -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only - -import os - -from typing import Any, AnyStr, Dict, cast, IO, Iterable, Optional, Union, TYPE_CHECKING -from ._list_blobs_helper import BlobPrefix -from .._models import BlobType -from .._shared.policies_async import ExponentialRetry, LinearRetry -from ._blob_client_async import BlobClient -from ._container_client_async import ContainerClient -from ._blob_service_client_async import BlobServiceClient -from ._lease_async import BlobLeaseClient -from ._download_async import StorageStreamDownloader - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential - from azure.core.credentials_async import AsyncTokenCredential - - -async def upload_blob_to_url( - blob_url: str, - data: Union[Iterable[AnyStr], IO[AnyStr]], - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any -) -> Dict[str, Any]: - """Upload data to a given URL - - The data will be uploaded as a block blob. - :param str blob_url: - The full URI to the blob. This can also include a SAS token. - :param data: - The data to upload. This can be bytes, text, an iterable or a file-like object. - :type data: bytes or str or Iterable - :param credential: - The credentials with which to authenticate. This is optional if the - blob URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :keyword bool overwrite: - Whether the blob to be uploaded should overwrite the current data. - If True, upload_blob_to_url will overwrite any existing data. 
If set to False, the - operation will fail with a ResourceExistsError. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword dict(str,str) metadata: - Name-value pairs associated with the blob as metadata. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https as https (the default) will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used, because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword str encoding: - Encoding to use if text is supplied as input. Defaults to UTF-8. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict[str, Any] - """ - async with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return await cast(BlobClient, client).upload_blob( - data=data, - blob_type=BlobType.BLOCKBLOB, - **kwargs) - - -# Download data to specified open file-handle. -async def _download_to_stream(client, handle, **kwargs): - stream = await client.download_blob(**kwargs) - await stream.readinto(handle) - - -async def download_blob_from_url( - blob_url: str, - output: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any -) -> None: - """Download the contents of a blob to a local file or stream. - - :param str blob_url: - The full URI to the blob. This can also include a SAS token. - :param output: - Where the data should be downloaded to. This could be either a file path to write to, - or an open IO handle to write to. - :type output: str or writable stream - :param credential: - The credentials with which to authenticate. This is optional if the - blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None - :keyword bool overwrite: - Whether the local file should be overwritten if it already exists. The default value is - `False` - in which case a ValueError will be raised if the file already exists. If set to - `True`, an attempt will be made to write to the existing file. If a stream handle is passed - in, this value is ignored. - :keyword int max_concurrency: - The number of parallel connections with which to download. 
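# How the removed async convenience helpers in this file were typically driven;
# the URL, SAS token, and local file name are placeholders.
import asyncio
from azure.storage.blob.aio import download_blob_from_url, upload_blob_to_url

async def round_trip():
    url = "https://mystorageaccount.blob.core.windows.net/mycontainer/hello.txt?<sas>"
    await upload_blob_to_url(url, b"hello, world", overwrite=True)
    await download_blob_from_url(url, "hello_copy.txt", overwrite=True)

asyncio.run(round_trip())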
- :keyword int offset: - Start of byte range to use for downloading a section of the blob. - Must be set if length is provided. - :keyword int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https as https (the default) will - already validate. Note that this MD5 hash is not stored with the - blob. Also note that if enabled, the memory-efficient upload algorithm - will not be used, because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :rtype: None - """ - overwrite = kwargs.pop('overwrite', False) - async with BlobClient.from_blob_url(blob_url, credential=credential) as client: - if hasattr(output, 'write'): - await _download_to_stream(client, output, **kwargs) - else: - if not overwrite and os.path.isfile(output): - raise ValueError(f"The file '{output}' already exists.") - with open(output, 'wb') as file_handle: - await _download_to_stream(client, file_handle, **kwargs) +from ._client import BlobClient +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'upload_blob_to_url', - 'download_blob_from_url', - 'BlobServiceClient', - 'BlobPrefix', - 'ContainerClient', - 'BlobClient', - 'BlobLeaseClient', - 'ExponentialRetry', - 'LinearRetry', - 'StorageStreamDownloader' + "BlobClient", ] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py deleted file mode 100644 index 41a79246cd75..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ /dev/null @@ -1,3192 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
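# The regenerated aio/__init__.py above re-exports names from a ._patch module and
# calls its patch_sdk() hook. A minimal _patch.py, following the code generator's
# convention, looks roughly like this (the body is illustrative):
from typing import List

__all__: List[str] = []  # hand-written names listed here survive regeneration

def patch_sdk() -> None:
    """Import-time hook for customizing the generated client; often a no-op."""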
-# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, invalid-overridden-method, docstring-keyword-should-match-keyword-only - -import warnings -from datetime import datetime -from functools import partial -from typing import ( - Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, - TYPE_CHECKING -) -from typing_extensions import Self - -from azure.core.async_paging import AsyncItemPaged -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError -from azure.core.pipeline import AsyncPipeline -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async - -from ._download_async import StorageStreamDownloader -from ._lease_async import BlobLeaseClient -from ._models import PageRangePaged -from ._upload_helpers import ( - upload_append_blob, - upload_block_blob, - upload_page_blob -) -from .._blob_client import StorageAccountHostsMixin -from .._blob_client_helpers import ( - _abort_copy_options, - _append_block_from_url_options, - _append_block_options, - _clear_page_options, - _commit_block_list_options, - _create_append_blob_options, - _create_page_blob_options, - _create_snapshot_options, - _delete_blob_options, - _download_blob_options, - _format_url, - _from_blob_url, - _get_blob_tags_options, - _get_block_list_result, - _get_page_ranges_options, - _parse_url, - _resize_blob_options, - _seal_append_blob_options, - _set_blob_metadata_options, - _set_blob_tags_options, - _set_http_headers_options, - _set_sequence_number_options, - _stage_block_from_url_options, - _stage_block_options, - _start_copy_from_url_options, - _upload_blob_from_url_options, - _upload_blob_options, - _upload_page_options, - _upload_pages_from_url_options -) -from .._deserialize import ( - deserialize_blob_properties, - deserialize_pipeline_response_into_cls, - get_page_ranges_result, - parse_tags -) -from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION -from .._generated.aio import AzureBlobStorage -from .._generated.models import CpkInfo -from .._models import BlobType, BlobBlock, BlobProperties, PageRange -from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str -from .._shared.policies_async import ExponentialRetry -from .._shared.response_handlers import process_storage_error, return_response_headers - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential - from azure.core.credentials_async import AsyncTokenCredential - from azure.core.pipeline.policies import AsyncHTTPPolicy - from azure.storage.blob.aio import ContainerClient - from .._models import ( - ContentSettings, - ImmutabilityPolicy, - PremiumPageBlobTier, - SequenceNumberAction, - StandardBlobTier - ) - - -class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods - """A client to interact with a specific blob, although that blob may not yet exist. - - :param str account_url: - The URI to the storage account. In order to create a client given the full URI to the blob, - use the :func:`from_blob_url` classmethod. - :param container_name: The container name for the blob. 
- :type container_name: str - :param blob_name: The name of the blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob_name: str - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`create_snapshot`. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :keyword str api_version: - The Storage API version to use for requests. Default value is the most recent service version that is - compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. - - .. versionadded:: 12.2.0 - - :keyword str secondary_hostname: - The hostname of the secondary endpoint. - :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. - Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be - uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, - the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. - :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient - algorithm when uploading a block blob. Defaults to 4*1024*1024+1. - :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. - :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, - the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. - :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, - or 4MB. - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://.blob.core.windows.net. - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_authentication_async.py - :start-after: [START create_blob_client] - :end-before: [END create_blob_client] - :language: python - :dedent: 8 - :caption: Creating the BlobClient from a URL to a public blob (no auth needed). - - .. literalinclude:: ../samples/blob_samples_authentication_async.py - :start-after: [START create_blob_client_sas_url] - :end-before: [END create_blob_client_sas_url] - :language: python - :dedent: 8 - :caption: Creating the BlobClient from a SAS URL to a blob. 
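# Constructing the async client this docstring describes; the endpoint and blob
# names are placeholders, and the tuning values are examples rather than defaults.
from azure.identity.aio import DefaultAzureCredential
from azure.storage.blob.aio import BlobClient

client = BlobClient(
    account_url="https://mystorageaccount.blob.core.windows.net",
    container_name="mycontainer",
    blob_name="big.bin",
    credential=DefaultAzureCredential(),
    max_block_size=8 * 1024 * 1024,         # upload chunk size (docstring default is 4MB)
    max_single_put_size=64 * 1024 * 1024,   # single-request upload cutoff (the documented default)
)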
- """ - def __init__( - self, account_url: str, - container_name: str, - blob_name: str, - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - parsed_url, sas_token, path_snapshot = _parse_url( - account_url=account_url, - container_name=container_name, - blob_name=blob_name) - self.container_name = container_name - self.blob_name = blob_name - - if snapshot is not None and hasattr(snapshot, 'snapshot'): - self.snapshot = snapshot.snapshot - elif isinstance(snapshot, dict): - self.snapshot = snapshot['snapshot'] - else: - self.snapshot = snapshot or path_snapshot - self.version_id = kwargs.pop('version_id', None) - - # This parameter is used for the hierarchy traversal. Give precedence to credential. - self._raw_credential = credential if credential else sas_token - self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) - super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] # pylint: disable=protected-access - self._configure_encryption(kwargs) - - def _format_url(self, hostname: str) -> str: - return _format_url( - container_name=self.container_name, - scheme=self.scheme, - blob_name=self.blob_name, - query_str=self._query_str, - hostname=hostname - ) - - @classmethod - def from_blob_url( - cls, blob_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - **kwargs: Any - ) -> Self: - """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. - - :param str blob_url: - The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be - either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. - :type blob_url: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`create_snapshot`. If specified, this will override - the snapshot in the url. 
-        :keyword str version_id: The version id parameter is an opaque DateTime value that, when present,
-            specifies the version of the blob to operate on.
-        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
-            authentication. Only has an effect when credential is of type TokenCredential. The value could be
-            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
-        :returns: A Blob client.
-        :rtype: ~azure.storage.blob.BlobClient
-        """
-        account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot)
-        return cls(
-            account_url, container_name=container_name, blob_name=blob_name,
-            snapshot=path_snapshot, credential=credential, **kwargs
-        )
-
-    @classmethod
-    def from_connection_string(
-        cls, conn_str: str,
-        container_name: str,
-        blob_name: str,
-        snapshot: Optional[Union[str, Dict[str, Any]]] = None,
-        credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None,  # pylint: disable=line-too-long
-        **kwargs: Any
-    ) -> Self:
-        """Create BlobClient from a Connection String.
-
-        :param str conn_str:
-            A connection string to an Azure Storage account.
-        :param container_name: The container name for the blob.
-        :type container_name: str
-        :param blob_name: The name of the blob with which to interact.
-        :type blob_name: str
-        :param str snapshot:
-            The optional blob snapshot on which to operate. This can be the snapshot ID string
-            or the response returned from :func:`create_snapshot`.
-        :param credential:
-            The credentials with which to authenticate. This is optional if the
-            account URL already has a SAS token, or the connection string already has shared
-            access key values. The value can be a SAS token string,
-            an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
-            an account shared access key, or an instance of a TokenCredential class from azure.identity.
-            Credentials provided here will take precedence over those in the connection string.
-            If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
-            should be the storage account key.
-        :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]]  # pylint: disable=line-too-long
-        :keyword str version_id: The version id parameter is an opaque DateTime value that, when present,
-            specifies the version of the blob to operate on.
-        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
-            authentication. Only has an effect when credential is of type TokenCredential. The value could be
-            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
-        :returns: A Blob client.
-        :rtype: ~azure.storage.blob.BlobClient
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_authentication.py
-                :start-after: [START auth_from_connection_string_blob]
-                :end-before: [END auth_from_connection_string_blob]
-                :language: python
-                :dedent: 8
-                :caption: Creating the BlobClient from a connection string.
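A minimal sketch of the two alternate constructors documented above; the SAS URL and connection string below are non-working placeholders, not real credentials:

from azure.storage.blob.aio import BlobClient

# From a full blob URL, optionally carrying a SAS token and/or snapshot.
sas_client = BlobClient.from_blob_url(
    "https://myaccount.blob.core.windows.net/mycontainer/myblob.txt?sv=placeholder"
)

# From an account connection string plus explicit container and blob names.
conn_client = BlobClient.from_connection_string(
    "DefaultEndpointsProtocol=https;AccountName=myaccount;"
    "AccountKey=cGxhY2Vob2xkZXI=;EndpointSuffix=core.windows.net",
    container_name="mycontainer",
    blob_name="myblob.txt",
)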
- """ - account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=snapshot, credential=credential, **kwargs - ) - - @distributed_trace_async - async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: - """Gets information related to the storage account in which the blob resides. - - The information can also be retrieved if the user has a SAS to a container or blob. - The keys in the returned dictionary include 'sku_name' and 'account_kind'. - - :returns: A dict of account information (SKU and account type). - :rtype: dict(str, str) - """ - try: - return cast(Dict[str, str], - await self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def upload_blob_from_url(self, source_url: str, **kwargs: Any) -> Dict[str, Any]: - """ - Creates a new Block Blob where the content of the blob is read from a given URL. - The content of an existing blob is overwritten with the new blob. - - :param str source_url: - A URL of up to 2 KB in length that specifies a file or blob. - The value should be URL-encoded as it would appear in a request URI. - The source must either be public or must be authenticated via a shared - access signature as part of the url or using the source_authorization keyword. - If the source is public, no authentication is required. - Examples: - https://myaccount.blob.core.windows.net/mycontainer/myblob - - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - - https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken - :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. - If True, upload_blob will overwrite the existing data. If set to False, the - operation will fail with ResourceExistsError. - :keyword bool include_source_blob_properties: - Indicates if properties from the source blob should be copied. Defaults to True. - :keyword tags: - Name-value pairs associated with the blob as tag. Tags are case-sensitive. - The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, - and tag values must be between 0 and 256 characters. - Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :paramtype tags: dict(str, str) - :keyword bytearray source_content_md5: - Specify the md5 that is used to verify the integrity of the source bytes. - :keyword ~datetime.datetime source_if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the source resource has been modified since the specified time. - :keyword ~datetime.datetime source_if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. 
-            Specify this header to perform the operation only if
-            the source resource has not been modified since the specified date/time.
-        :keyword str source_etag:
-            The source ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions source_match_condition:
-            The source match condition to use upon the etag.
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            The destination ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The destination match condition to use upon the etag.
-        :keyword destination_lease:
-            The lease ID specified for this header must match the lease ID of the
-            destination blob. If the request does not include the lease ID or it is not
-            valid, the operation fails with status code 412 (Precondition Failed).
-        :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :keyword ~azure.storage.blob.ContentSettings content_settings:
-            ContentSettings object used to set blob properties. Used to set content type, encoding,
-            language, disposition, md5, and cache control.
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-        :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
-            A standard blob tier value to set the blob to. For this version of the library,
-            this is only applicable to block blobs on standard storage accounts.
-        :keyword str source_authorization:
-            Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
-            the prefix of the source_authorization string.
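A hedged usage sketch for upload_blob_from_url as documented above; copy_from_public_url is a hypothetical helper, blob stands for a connected azure.storage.blob.aio.BlobClient, and the source URL is a placeholder:

async def copy_from_public_url(blob) -> None:
    # Server-side creation of a block blob from a publicly readable source URL;
    # a private source would need a SAS token or the source_authorization keyword.
    result = await blob.upload_blob_from_url(
        "https://otheraccount.blob.core.windows.net/public/source-blob",
        overwrite=True,                       # replace any existing destination data
        include_source_blob_properties=True,  # carry over content settings from the source
    )
    print(result.get("etag"), result.get("last_modified"))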
-        :returns: Response from creating a new block blob for a given URL.
-        :rtype: Dict[str, Any]
-        """
-        if kwargs.get('cpk') and self.scheme.lower() != 'https':
-            raise ValueError("Customer provided encryption key must be used over HTTPS.")
-        options = _upload_blob_from_url_options(
-            source_url=source_url,
-            **kwargs)
-        try:
-            return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options))
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def upload_blob(
-        self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]],
-        blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB,
-        length: Optional[int] = None,
-        metadata: Optional[Dict[str, str]] = None,
-        **kwargs: Any
-    ) -> Dict[str, Any]:
-        """Creates a new blob from a data source with automatic chunking.
-
-        :param data: The blob data to upload.
-        :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]]
-        :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
-            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
-        :param int length:
-            Number of bytes to read from the stream. This is optional, but
-            should be supplied for optimal performance.
-        :param metadata:
-            Name-value pairs associated with the blob as metadata.
-        :type metadata: dict(str, str)
-        :keyword tags:
-            Name-value pairs associated with the blob as tags. Tags are case-sensitive.
-            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
-            and tag values must be between 0 and 256 characters.
-            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
-            space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
-
-            .. versionadded:: 12.4.0
-
-        :paramtype tags: dict(str, str)
-        :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
-            If True, upload_blob will overwrite the existing data. If set to False, the
-            operation will fail with ResourceExistsError. The exception to the above is with Append
-            blob types: if set to False and the data already exists, an error will not be raised
-            and the data will be appended to the existing blob. If overwrite is set to True, the existing
-            append blob will be deleted and a new one created. Defaults to False.
-        :keyword ~azure.storage.blob.ContentSettings content_settings:
-            ContentSettings object used to set blob properties. Used to set content type, encoding,
-            language, disposition, md5, and cache control.
-        :keyword bool validate_content:
-            If true, calculates an MD5 hash for each chunk of the blob. The storage
-            service checks the hash of the content that has arrived with the hash
-            that was sent. This is primarily valuable for detecting bitflips on
-            the wire if using http instead of https, as https (the default) will
-            already validate. Note that this MD5 hash is not stored with the
-            blob. Also note that if enabled, the memory-efficient upload algorithm
-            will not be used because computing the MD5 hash requires buffering
-            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
-        :keyword lease:
-            If specified, upload_blob only succeeds if the
-            blob's lease is active and matches this ID.
-            Required if the blob has an active lease.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
-            A page blob tier value to set the blob to. The tier correlates to the size of the
-            blob and number of allowed IOPS. This is only applicable to page blobs on
-            premium storage accounts.
-        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
-            Specifies the immutability policy of a blob, blob snapshot or blob version.
-            Currently this parameter of the upload_blob() API is for BlockBlob only.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
-
-        :keyword bool legal_hold:
-            Specified if a legal hold should be set on the blob.
-            Currently this parameter of the upload_blob() API is for BlockBlob only.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
-
-        :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
-            A standard blob tier value to set the blob to. For this version of the library,
-            this is only applicable to block blobs on standard storage accounts.
-        :keyword int maxsize_condition:
-            Optional conditional header. The max length in bytes permitted for
-            the append blob. If the Append Block operation would cause the blob
-            to exceed that limit or if the blob size is already greater than the
-            value specified in this header, the request will fail with
-            MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
-        :keyword int max_concurrency:
-            Maximum number of parallel connections to use when the blob size exceeds
-            64MB.
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.2.0
-
-        :keyword str encoding:
-            Defaults to UTF-8.
-        :keyword progress_hook:
-            An async callback to track the progress of a long-running upload. The signature is
-            function(current: int, total: Optional[int]) where current is the number of bytes transferred
-            so far, and total is the size of the blob or None if the size is unknown.
-        :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]]
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__. This method may make multiple calls to the Azure service and
-            the timeout will apply to each call individually.
-        :returns: Blob-updated property dict (Etag and last modified)
-        :rtype: dict[str, Any]
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_hello_world_async.py
-                :start-after: [START upload_a_blob]
-                :end-before: [END upload_a_blob]
-                :language: python
-                :dedent: 16
-                :caption: Upload a blob to the container.
-        """
-        if self.require_encryption and not self.key_encryption_key:
-            raise ValueError("Encryption required but no key was provided.")
-        if kwargs.get('cpk') and self.scheme.lower() != 'https':
-            raise ValueError("Customer provided encryption key must be used over HTTPS.")
-        options = _upload_blob_options(
-            data=data,
-            blob_type=blob_type,
-            length=length,
-            metadata=metadata,
-            encryption_options={
-                'required': self.require_encryption,
-                'version': self.encryption_version,
-                'key': self.key_encryption_key,
-                'resolver': self.key_resolver_function
-            },
-            config=self._config,
-            sdk_moniker=self._sdk_moniker,
-            client=self._client,
-            **kwargs)
-        if blob_type == BlobType.BlockBlob:
-            return cast(Dict[str, Any], await upload_block_blob(**options))
-        if blob_type == BlobType.PageBlob:
-            return cast(Dict[str, Any], await upload_page_blob(**options))
-        return cast(Dict[str, Any], await upload_append_blob(**options))
-
-    @overload
-    async def download_blob(
-        self, offset: Optional[int] = None,
-        length: Optional[int] = None,
-        *,
-        encoding: str,
-        **kwargs: Any
-    ) -> StorageStreamDownloader[str]:
-        ...
-
-    @overload
-    async def download_blob(
-        self, offset: Optional[int] = None,
-        length: Optional[int] = None,
-        *,
-        encoding: None = None,
-        **kwargs: Any
-    ) -> StorageStreamDownloader[bytes]:
-        ...
-
-    @distributed_trace_async
-    async def download_blob(
-        self, offset: Optional[int] = None,
-        length: Optional[int] = None,
-        *,
-        encoding: Union[str, None] = None,
-        **kwargs: Any
-    ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]:
-        """Downloads a blob to the StorageStreamDownloader. The readall() method must
-        be used to read all the content or readinto() must be used to download the blob into
-        a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks.
-
-        :param int offset:
-            Start of byte range to use for downloading a section of the blob.
-            Must be set if length is provided.
-        :param int length:
-            Number of bytes to read from the stream. This is optional, but
-            should be supplied for optimal performance.
-        :keyword str version_id:
-            The version id parameter is an opaque DateTime
-            value that, when present, specifies the version of the blob to download.
-
-            .. versionadded:: 12.4.0
-
-            This keyword argument was introduced in API version '2019-12-12'.
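A minimal sketch of upload_blob as documented above; upload_example is a hypothetical helper and blob stands for a connected azure.storage.blob.aio.BlobClient:

async def upload_example(blob) -> None:
    data = b"x" * (8 * 1024 * 1024)  # 8 MiB; a single PUT under the default 64MB max_single_put_size
    result = await blob.upload_blob(
        data,
        overwrite=True,
        max_concurrency=4,            # only relevant once the payload is chunked
        metadata={"source": "demo"},
    )
    print(result["etag"])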
-
-        :keyword bool validate_content:
-            If true, calculates an MD5 hash for each chunk of the blob. The storage
-            service checks the hash of the content that has arrived with the hash
-            that was sent. This is primarily valuable for detecting bitflips on
-            the wire if using http instead of https, as https (the default) will
-            already validate. Note that this MD5 hash is not stored with the
-            blob. Also note that if enabled, the memory-efficient algorithm
-            will not be used, because computing the MD5 hash requires buffering
-            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
-        :keyword lease:
-            Required if the blob has an active lease. If specified, download_blob only
-            succeeds if the blob's lease is active and matches this ID. Value can be a
-            BlobLeaseClient object or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword int max_concurrency:
-            The number of parallel connections with which to download.
-        :keyword str encoding:
-            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
-        :keyword progress_hook:
-            An async callback to track the progress of a long-running download. The signature is
-            function(current: int, total: int) where current is the number of bytes transferred
-            so far, and total is the total size of the download.
-        :paramtype progress_hook: Callable[[int, int], Awaitable[None]]
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__. This method may make multiple calls to the Azure service and
-            the timeout will apply to each call individually.
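A minimal sketch of download_blob and the StorageStreamDownloader consumption styles documented above; download_example is a hypothetical helper and blob stands for a connected async BlobClient:

async def download_example(blob) -> None:
    # Whole payload decoded to text via the encoding overload.
    text = await (await blob.download_blob(encoding="utf-8")).readall()
    print(len(text))

    # Raw bytes, streamed chunk by chunk.
    downloader = await blob.download_blob(max_concurrency=4)
    async for chunk in downloader.chunks():
        print(len(chunk))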
-        :returns: A streaming object (StorageStreamDownloader)
-        :rtype: ~azure.storage.blob.aio.StorageStreamDownloader
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_hello_world_async.py
-                :start-after: [START download_a_blob]
-                :end-before: [END download_a_blob]
-                :language: python
-                :dedent: 16
-                :caption: Download a blob.
-        """
-        if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function):
-            raise ValueError("Encryption required but no key was provided.")
-        if length is not None and offset is None:
-            raise ValueError("Offset value must not be None if length is set.")
-        if kwargs.get('cpk') and self.scheme.lower() != 'https':
-            raise ValueError("Customer provided encryption key must be used over HTTPS.")
-        options = _download_blob_options(
-            blob_name=self.blob_name,
-            container_name=self.container_name,
-            version_id=get_version_id(self.version_id, kwargs),
-            offset=offset,
-            length=length,
-            encoding=encoding,
-            encryption_options={
-                'required': self.require_encryption,
-                'version': self.encryption_version,
-                'key': self.key_encryption_key,
-                'resolver': self.key_resolver_function
-            },
-            config=self._config,
-            sdk_moniker=self._sdk_moniker,
-            client=self._client,
-            **kwargs)
-        downloader = StorageStreamDownloader(**options)
-        await downloader._setup()  # pylint: disable=protected-access
-        return downloader
-
-    @distributed_trace_async
-    async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None:
-        """Marks the specified blob for deletion.
-
-        The blob is later deleted during garbage collection.
-        Note that in order to delete a blob, you must delete all of its
-        snapshots. You can delete both at the same time with the delete_blob()
-        operation.
-
-        If a delete retention policy is enabled for the service, then this operation soft deletes the blob
-        and retains the blob for a specified number of days.
-        After the specified number of days, the blob's data is removed from the service during garbage collection.
-        A soft-deleted blob is accessible through :func:`~ContainerClient.list_blobs()` by specifying the
-        `include=['deleted']` option, and can be restored using the :func:`undelete` operation.
-
-        :param str delete_snapshots:
-            Required if the blob has associated snapshots. Values include:
-            - "only": Deletes only the blob's snapshots.
-            - "include": Deletes the blob along with all snapshots.
-        :keyword str version_id:
-            The version id parameter is an opaque DateTime
-            value that, when present, specifies the version of the blob to delete.
-
-            .. versionadded:: 12.4.0
-
-            This keyword argument was introduced in API version '2019-12-12'.
-
-        :keyword lease:
-            Required if the blob has an active lease. If specified, delete_blob only
-            succeeds if the blob's lease is active and matches this ID. Value can be a
-            BlobLeaseClient object or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :rtype: None
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_hello_world_async.py
-                :start-after: [START delete_blob]
-                :end-before: [END delete_blob]
-                :language: python
-                :dedent: 16
-                :caption: Delete a blob.
-        """
-        options = _delete_blob_options(
-            snapshot=self.snapshot,
-            version_id=get_version_id(self.version_id, kwargs),
-            delete_snapshots=delete_snapshots,
-            **kwargs)
-        try:
-            await self._client.blob.delete(**options)
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def undelete_blob(self, **kwargs: Any) -> None:
-        """Restores soft-deleted blobs or snapshots.
-
-        The operation will only be successful if used within the specified number of days
-        set in the delete retention policy.
-
-        If blob versioning is enabled, the base blob cannot be restored using this
-        method. Instead use :func:`start_copy_from_url` with the URL of the blob version
-        you wish to promote to the current version.
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :rtype: None
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_common_async.py
-                :start-after: [START undelete_blob]
-                :end-before: [END undelete_blob]
-                :language: python
-                :dedent: 12
-                :caption: Undeleting a blob.
-        """
-        try:
-            await self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs)
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def exists(self, **kwargs: Any) -> bool:
-        """
-        Returns True if a blob exists with the defined parameters, and returns
-        False otherwise.
-
-        :keyword str version_id:
-            The version id parameter is an opaque DateTime
-            value that, when present, specifies the version of the blob to check if it exists.
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
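A minimal sketch of the delete/undelete pairing documented above; delete_and_restore is a hypothetical helper and blob stands for a connected async BlobClient:

async def delete_and_restore(blob) -> None:
    await blob.delete_blob(delete_snapshots="include")  # remove the blob and its snapshots
    await blob.undelete_blob()  # succeeds only while the delete retention policy still holds the data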
-        :returns: Whether a blob with the defined parameters exists.
-        :rtype: bool
-        """
-        version_id = get_version_id(self.version_id, kwargs)
-        try:
-            await self._client.blob.get_properties(
-                snapshot=self.snapshot,
-                version_id=version_id,
-                **kwargs)
-            return True
-        # A blob encrypted with CPK raises ResourceExistsError when read without the
-        # key, but it does exist.
-        except ResourceExistsError:
-            return True
-        except HttpResponseError as error:
-            try:
-                process_storage_error(error)
-            except ResourceNotFoundError:
-                return False
-
-    @distributed_trace_async
-    async def get_blob_properties(self, **kwargs: Any) -> BlobProperties:
-        """Returns all user-defined metadata, standard HTTP properties, and
-        system properties for the blob. It does not return the content of the blob.
-
-        :keyword lease:
-            Required if the blob has an active lease. Value can be a BlobLeaseClient object
-            or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword str version_id:
-            The version id parameter is an opaque DateTime
-            value that, when present, specifies the version of the blob to get properties.
-
-            .. versionadded:: 12.4.0
-
-            This keyword argument was introduced in API version '2019-12-12'.
-
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: BlobProperties
-        :rtype: ~azure.storage.blob.BlobProperties
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_common_async.py
-                :start-after: [START get_blob_properties]
-                :end-before: [END get_blob_properties]
-                :language: python
-                :dedent: 12
-                :caption: Getting the properties for a blob.
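A minimal sketch pairing exists() with get_blob_properties() as documented above; inspect is a hypothetical helper and blob stands for a connected async BlobClient:

async def inspect(blob) -> None:
    if await blob.exists():
        props = await blob.get_blob_properties()
        print(props.name, props.size, props.etag, props.blob_type)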
- """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - try: - cls_method = kwargs.pop('cls', None) - if cls_method: - kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) - blob_props = await self._client.blob.get_properties( - timeout=kwargs.pop('timeout', None), - version_id=version_id, - snapshot=self.snapshot, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=kwargs.pop('cls', None) or deserialize_blob_properties, - cpk_info=cpk_info, - **kwargs) - except HttpResponseError as error: - process_storage_error(error) - blob_props.name = self.blob_name - if isinstance(blob_props, BlobProperties): - blob_props.container = self.container_name - blob_props.snapshot = self.snapshot - return cast(BlobProperties, blob_props) - - @distributed_trace_async - async def set_http_headers( - self, content_settings: Optional["ContentSettings"] = None, - **kwargs: Any - ) -> Dict[str, Any]: - """Sets system properties on the blob. - - If one property is set for the content_settings, all properties will be overridden. - - :param ~azure.storage.blob.ContentSettings content_settings: - ContentSettings object used to set blob properties. Used to set content type, encoding, - language, disposition, md5, and cache control. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. 
-        :returns: Blob-updated property dict (Etag and last modified)
-        :rtype: Dict[str, Any]
-        """
-        options = _set_http_headers_options(content_settings=content_settings, **kwargs)
-        try:
-            return cast(Dict[str, Any], await self._client.blob.set_http_headers(**options))
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def set_blob_metadata(
-        self, metadata: Optional[Dict[str, str]] = None,
-        **kwargs: Any
-    ) -> Dict[str, Union[str, datetime]]:
-        """Sets user-defined metadata for the blob as one or more name-value pairs.
-
-        :param metadata:
-            Dict containing name and value pairs. Each call to this operation
-            replaces all existing metadata attached to the blob. To remove all
-            metadata from the blob, call this operation with no metadata headers.
-        :type metadata: dict(str, str)
-        :keyword lease:
-            Required if the blob has an active lease. Value can be a BlobLeaseClient object
-            or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.2.0
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
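A minimal sketch of the two setters documented above; update_blob is a hypothetical helper and blob stands for a connected async BlobClient:

from azure.storage.blob import ContentSettings

async def update_blob(blob) -> None:
    # Per the caveat above, set_http_headers overwrites every content setting at once.
    await blob.set_http_headers(
        ContentSettings(content_type="application/json", cache_control="max-age=3600")
    )
    await blob.set_blob_metadata({"reviewed": "true"})  # replaces all existing metadata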
- :returns: Blob-updated property dict (Etag and last modified) - :rtype: Dict[str, Union[str, datetime]] - """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _set_blob_metadata_options(metadata=metadata, **kwargs) - try: - return cast(Dict[str, Union[str, datetime]], await self._client.blob.set_metadata(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def set_immutability_policy( - self, immutability_policy: "ImmutabilityPolicy", - **kwargs: Any - ) -> Dict[str, str]: - """The Set Immutability Policy operation sets the immutability policy on the blob. - - .. versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: - Specifies the immutability policy of a blob, blob snapshot or blob version. - - .. versionadded:: 12.10.0 - This was introduced in API version '2020-10-02'. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. - :rtype: Dict[str, str] - """ - - kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time - kwargs['immutability_policy_mode'] = immutability_policy.policy_mode - return cast(Dict[str, str], - await self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs)) - - @distributed_trace_async - async def delete_immutability_policy(self, **kwargs: Any) -> None: - """The Delete Immutability Policy operation deletes the immutability policy on the blob. - - .. versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. - :rtype: Dict[str, str] - """ - - await self._client.blob.delete_immutability_policy(**kwargs) - - @distributed_trace_async - async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: - """The Set Legal Hold operation sets a legal hold on the blob. - - .. versionadded:: 12.10.0 - This operation was introduced in API version '2020-10-02'. - - :param bool legal_hold: - Specified if a legal hold should be set on the blob. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Key value pairs of blob tags. 
-        :rtype: Dict[str, Union[str, datetime, bool]]
-        """
-
-        return cast(Dict[str, Union[str, datetime, bool]],
-            await self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs))
-
-    @distributed_trace_async
-    async def create_page_blob(
-        self, size: int,
-        content_settings: Optional["ContentSettings"] = None,
-        metadata: Optional[Dict[str, str]] = None,
-        premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None,
-        **kwargs: Any
-    ) -> Dict[str, Union[str, datetime]]:
-        """Creates a new Page Blob of the specified size.
-
-        :param int size:
-            This specifies the maximum size for the page blob, up to 1 TB.
-            The page blob size must be aligned to a 512-byte boundary.
-        :param ~azure.storage.blob.ContentSettings content_settings:
-            ContentSettings object used to set blob properties. Used to set content type, encoding,
-            language, disposition, md5, and cache control.
-        :param metadata:
-            Name-value pairs associated with the blob as metadata.
-        :type metadata: dict(str, str)
-        :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
-            A page blob tier value to set the blob to. The tier correlates to the size of the
-            blob and number of allowed IOPS. This is only applicable to page blobs on
-            premium storage accounts.
-        :keyword tags:
-            Name-value pairs associated with the blob as tags. Tags are case-sensitive.
-            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
-            and tag values must be between 0 and 256 characters.
-            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
-            space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
-
-            .. versionadded:: 12.4.0
-
-        :paramtype tags: dict(str, str)
-        :keyword int sequence_number:
-            Only for Page blobs. The sequence number is a user-controlled value that you can use to
-            track requests. The value of the sequence number must be between 0
-            and 2^63 - 1. The default value is 0.
-        :keyword lease:
-            Required if the blob has an active lease. Value can be a BlobLeaseClient object
-            or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
-            Specifies the immutability policy of a blob, blob snapshot or blob version.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
-
-        :keyword bool legal_hold:
-            Specified if a legal hold should be set on the blob.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
-
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.2.0
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: Blob-updated property dict (Etag and last modified).
-        :rtype: dict[str, Any]
-        """
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-        if kwargs.get('cpk') and self.scheme.lower() != 'https':
-            raise ValueError("Customer provided encryption key must be used over HTTPS.")
-        options = _create_page_blob_options(
-            size=size,
-            content_settings=content_settings,
-            metadata=metadata,
-            premium_page_blob_tier=premium_page_blob_tier,
-            **kwargs)
-        try:
-            return cast(Dict[str, Any], await self._client.page_blob.create(**options))
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def create_append_blob(
-        self, content_settings: Optional["ContentSettings"] = None,
-        metadata: Optional[Dict[str, str]] = None,
-        **kwargs: Any
-    ) -> Dict[str, Union[str, datetime]]:
-        """Creates a new Append Blob. This operation creates a new 0-length append blob. The content
-        of any existing blob is overwritten with the newly initialized append blob. To add content to
-        the append blob, call the :func:`append_block` or :func:`append_block_from_url` method.
-
-        :param ~azure.storage.blob.ContentSettings content_settings:
-            ContentSettings object used to set blob properties. Used to set content type, encoding,
-            language, disposition, md5, and cache control.
-        :param metadata:
-            Name-value pairs associated with the blob as metadata.
-        :type metadata: dict(str, str)
-        :keyword tags:
-            Name-value pairs associated with the blob as tags. Tags are case-sensitive.
-            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
-            and tag values must be between 0 and 256 characters.
-            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
-            space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
-
-            .. versionadded:: 12.4.0
-
-        :paramtype tags: dict(str, str)
-        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
-            Specifies the immutability policy of a blob, blob snapshot or blob version.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
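A minimal sketch of create_page_blob as documented above; make_page_blob is a hypothetical helper and blob stands for a connected async BlobClient:

async def make_page_blob(blob) -> None:
    await blob.create_page_blob(size=1024 * 1024)  # 1 MiB, 512-byte aligned, zero-filled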
-
-        :keyword bool legal_hold:
-            Specified if a legal hold should be set on the blob.
-
-            .. versionadded:: 12.10.0
-                This was introduced in API version '2020-10-02'.
-
-        :keyword lease:
-            Required if the blob has an active lease. Value can be a BlobLeaseClient object
-            or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.2.0
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: Blob-updated property dict (Etag and last modified).
-        :rtype: dict[str, Any]
-        """
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-        if kwargs.get('cpk') and self.scheme.lower() != 'https':
-            raise ValueError("Customer provided encryption key must be used over HTTPS.")
-        options = _create_append_blob_options(
-            content_settings=content_settings,
-            metadata=metadata,
-            **kwargs)
-        try:
-            return cast(Dict[str, Union[str, datetime]], await self._client.append_blob.create(**options))
-        except HttpResponseError as error:
-            process_storage_error(error)
-
-    @distributed_trace_async
-    async def create_snapshot(
-        self, metadata: Optional[Dict[str, str]] = None,
-        **kwargs: Any
-    ) -> Dict[str, Union[str, datetime]]:
-        """Creates a snapshot of the blob.
-
-        A snapshot is a read-only version of a blob that's taken at a point in time.
-        It can be read, copied, or deleted, but not modified. Snapshots provide a way
-        to back up a blob as it appears at a moment in time.
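A minimal sketch of create_append_blob plus the append_block call its docstring points to; make_append_blob is a hypothetical helper and blob stands for a connected async BlobClient:

async def make_append_blob(blob) -> None:
    await blob.create_append_blob()            # 0-length append blob
    await blob.append_block(b"log line 1\n")   # content is added after creation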
-
-        A snapshot of a blob has the same name as the base blob from which the snapshot
-        is taken, with a DateTime value appended to indicate the time at which the
-        snapshot was taken.
-
-        :param metadata:
-            Name-value pairs associated with the blob as metadata.
-        :type metadata: dict(str, str)
-        :keyword ~datetime.datetime if_modified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only
-            if the resource has been modified since the specified time.
-        :keyword ~datetime.datetime if_unmodified_since:
-            A DateTime value. Azure expects the date value passed in to be UTC.
-            If timezone is included, any non-UTC datetimes will be converted to UTC.
-            If a date is passed in without timezone info, it is assumed to be UTC.
-            Specify this header to perform the operation only if
-            the resource has not been modified since the specified date/time.
-        :keyword str etag:
-            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
-            and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition:
-            The match condition to use upon the etag.
-        :keyword str if_tags_match_condition:
-            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
-            eg. ``\"\\\"tagname\\\"='my tag'\"``
-
-            .. versionadded:: 12.4.0
-
-        :keyword lease:
-            Required if the blob has an active lease. Value can be a BlobLeaseClient object
-            or the lease ID as a string.
-        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
-        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
-            Encrypts the data on the service-side with the given key.
-            Use of customer-provided keys must be done over HTTPS.
-            As the encryption key itself is provided in the request,
-            a secure connection must be established to transfer the key.
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the service. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.2.0
-
-        :keyword int timeout:
-            Sets the server-side timeout for the operation in seconds. For more details see
-            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
-            This value is not tracked or validated on the client. To configure client-side network timeouts
-            see `here `__.
-        :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified).
-        :rtype: dict[str, Any]
-
-        .. admonition:: Example:
-
-            .. literalinclude:: ../samples/blob_samples_common_async.py
-                :start-after: [START create_blob_snapshot]
-                :end-before: [END create_blob_snapshot]
-                :language: python
-                :dedent: 12
-                :caption: Create a snapshot of the blob.
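A minimal sketch of create_snapshot as documented above; snapshot_blob is a hypothetical helper and blob stands for a connected async BlobClient:

async def snapshot_blob(blob) -> None:
    result = await blob.create_snapshot(metadata={"reason": "backup"})
    print(result["snapshot"])  # the DateTime ID that addresses the read-only copy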
-
- """
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _create_snapshot_options(metadata=metadata, **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.blob.create_snapshot(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def start_copy_from_url(
- self, source_url: str,
- metadata: Optional[Dict[str, str]] = None,
- incremental_copy: bool = False,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """Copies a blob from the given URL.
-
- This operation returns a dictionary containing `copy_status` and `copy_id`,
- which can be used to check the status of or abort the copy operation.
- `copy_status` will be 'success' if the copy completed synchronously or
- 'pending' if the copy has been started asynchronously. For asynchronous copies,
- the status can be checked by polling the :func:`get_blob_properties` method and
- checking the copy status. Set `requires_sync` to True to force the copy to be synchronous.
- The Blob service copies blobs on a best-effort basis.
-
- The source blob for a copy operation may be a block blob, an append blob,
- or a page blob. If the destination blob already exists, it must be of the
- same blob type as the source blob. Any existing destination blob will be
- overwritten. The destination blob cannot be modified while a copy operation
- is in progress.
-
- When copying from a page blob, the Blob service creates a destination page
- blob of the source blob's length, initially containing all zeroes. Then
- the source page ranges are enumerated, and non-empty ranges are copied.
-
- For a block blob or an append blob, the Blob service creates a committed
- blob of zero length before returning from this operation. When copying
- from a block blob, all committed blocks and their block IDs are copied.
- Uncommitted blocks are not copied. At the end of the copy operation, the
- destination blob will have the same committed block count as the source.
-
- When copying from an append blob, all committed blocks are copied. At the
- end of the copy operation, the destination blob will have the same committed
- block count as the source.
-
- :param str source_url:
- A URL of up to 2 KB in length that specifies a file or blob.
- The value should be URL-encoded as it would appear in a request URI.
- If the source is in another account, the source must either be public
- or must be authenticated via a shared access signature. If the source
- is public, no authentication is required.
- Examples:
- https://myaccount.blob.core.windows.net/mycontainer/myblob
-
- https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
-
- https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken
- :param metadata:
- Name-value pairs associated with the blob as metadata. If no name-value
- pairs are specified, the operation will copy the metadata from the
- source blob or file to the destination blob. If one or more name-value
- pairs are specified, the destination blob is created with the specified
- metadata, and metadata is not copied from the source blob or file.
- :type metadata: dict(str, str)
- :param bool incremental_copy:
- Copies the snapshot of the source page blob to a destination page blob.
- The snapshot is copied such that only the differential changes between
- the previously copied snapshot and the current blob are transferred to the destination.
- The copied snapshots are complete copies of the original snapshot and
- can be read or copied from as usual. Defaults to False.
- :keyword tags:
- Name-value pairs associated with the blob as tags. Tags are case-sensitive.
- The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
- and tag values must be between 0 and 256 characters.
- Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_).
-
- The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob.
- This option is only available when `incremental_copy=False` and `requires_sync=True`.
-
- .. versionadded:: 12.4.0
-
- :paramtype tags: dict(str, str) or Literal["COPY"]
- :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
- Specifies the immutability policy of a blob, blob snapshot or blob version.
-
- .. versionadded:: 12.10.0
- This was introduced in API version '2020-10-02'.
-
- :keyword bool legal_hold:
- Specifies whether a legal hold should be set on the blob.
-
- .. versionadded:: 12.10.0
- This was introduced in API version '2020-10-02'.
-
- :keyword ~datetime.datetime source_if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this conditional header to copy the blob only if the source
- blob has been modified since the specified date/time.
- :keyword ~datetime.datetime source_if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this conditional header to copy the blob only if the source blob
- has not been modified since the specified date/time.
- :keyword str source_etag:
- The source ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions source_match_condition:
- The source match condition to use upon the etag.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this conditional header to copy the blob only
- if the destination blob has been modified since the specified date/time.
- If the destination blob has not been modified, the Blob service returns
- status code 412 (Precondition Failed).
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this conditional header to copy the blob only
- if the destination blob has not been modified since the specified
- date/time. If the destination blob has been modified, the Blob service
- returns status code 412 (Precondition Failed).
- :keyword str etag:
- The destination ETag value, or the wildcard character (*).
Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The destination match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword destination_lease:
- The lease ID specified for this header must match the lease ID of the
- destination blob. If the request does not include the lease ID or it is not
- valid, the operation fails with status code 412 (Precondition Failed).
- :paramtype destination_lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword source_lease:
- Specify this to perform the Copy Blob operation only if
- the lease ID given matches the active lease ID of the source blob.
- :paramtype source_lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
- A standard blob tier value to set the blob to. For this version of the library,
- this is only applicable to block blobs on standard storage accounts.
- :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
- Indicates the priority with which to rehydrate an archived blob.
- :keyword bool seal_destination_blob:
- Seal the destination append blob. This operation is only for append blobs.
-
- .. versionadded:: 12.4.0
-
- :keyword bool requires_sync:
- Enforces that the service will not return a response until the copy is complete.
- :keyword str source_authorization:
- Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
- the prefix of the source_authorization string. This option is only available when `incremental_copy` is
- set to False and `requires_sync` is set to True.
-
- .. versionadded:: 12.9.0
-
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.10.0
-
- :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status).
- :rtype: dict[str, Union[str, ~datetime.datetime]]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_common_async.py
- :start-after: [START copy_blob_from_url]
- :end-before: [END copy_blob_from_url]
- :language: python
- :dedent: 16
- :caption: Copy a blob from a URL.
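-
- In addition to the sample above, a minimal inline sketch of polling an
- asynchronous copy (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient``; the source URL is illustrative only):
-
- .. code-block:: python
-
-     copy = await blob_client.start_copy_from_url(
-         "https://myaccount.blob.core.windows.net/mycontainer/myblob")
-     if copy["copy_status"] == "pending":
-         # Poll the destination blob until the service finishes the copy.
-         props = await blob_client.get_blob_properties()
-         print(props.copy.status, props.copy.id)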
-
- """
- options = _start_copy_from_url_options(
- source_url=source_url,
- metadata=metadata,
- incremental_copy=incremental_copy,
- **kwargs)
- try:
- if incremental_copy:
- return cast(Dict[str, Union[str, datetime]], await self._client.page_blob.copy_incremental(**options))
- return cast(Dict[str, Union[str, datetime]], await self._client.blob.start_copy_from_url(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def abort_copy(
- self, copy_id: Union[str, Dict[str, Any], BlobProperties],
- **kwargs: Any
- ) -> None:
- """Abort an ongoing copy operation.
-
- This will leave a destination blob with zero length and full metadata.
- This will raise an error if the copy operation has already ended.
-
- :param copy_id:
- The copy operation to abort. This can be either an ID, or an
- instance of BlobProperties.
- :type copy_id: str or ~azure.storage.blob.BlobProperties
- :rtype: None
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_common_async.py
- :start-after: [START abort_copy_blob_from_url]
- :end-before: [END abort_copy_blob_from_url]
- :language: python
- :dedent: 16
- :caption: Abort copying a blob from URL.
- """
- options = _abort_copy_options(copy_id, **kwargs)
- try:
- await self._client.blob.abort_copy_from_url(**options)
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def acquire_lease(
- self, lease_duration: int = -1,
- lease_id: Optional[str] = None,
- **kwargs: Any
- ) -> BlobLeaseClient:
- """Requests a new lease.
-
- If the blob does not have an active lease, the Blob
- Service creates a lease on the blob and returns a new lease.
-
- :param int lease_duration:
- Specifies the duration of the lease, in seconds, or negative one
- (-1) for a lease that never expires. A non-infinite lease can be
- between 15 and 60 seconds. A lease duration cannot be changed
- using renew or change. Default is -1 (infinite lease).
- :param str lease_id:
- Proposed lease ID, in a GUID string format. The Blob Service
- returns 400 (Invalid request) if the proposed lease ID is not
- in the correct format.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A BlobLeaseClient object.
- :rtype: ~azure.storage.blob.aio.BlobLeaseClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_common_async.py
- :start-after: [START acquire_lease_on_blob]
- :end-before: [END acquire_lease_on_blob]
- :language: python
- :dedent: 12
- :caption: Acquiring a lease on a blob.
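-
- A minimal inline sketch in addition to the sample above (``blob_client`` is
- assumed to be an authenticated ``azure.storage.blob.aio.BlobClient``; the
- metadata is illustrative only):
-
- .. code-block:: python
-
-     # Hold a short lease while mutating the blob, then release it so
-     # other clients can write again.
-     lease = await blob_client.acquire_lease(lease_duration=15)
-     await blob_client.set_blob_metadata({"locked": "true"}, lease=lease)
-     await lease.release()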
- """
- lease = BlobLeaseClient(self, lease_id=lease_id)
- await lease.acquire(lease_duration=lease_duration, **kwargs)
- return lease
-
- @distributed_trace_async
- async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None:
- """This operation sets the tier on a block blob.
-
- A block blob's tier determines Hot/Cool/Archive storage type.
- This operation does not update the blob's ETag.
-
- :param standard_blob_tier:
- Indicates the tier to be set on the blob. Options include 'Hot', 'Cool',
- 'Archive'. The hot tier is optimized for storing data that is accessed
- frequently. The cool storage tier is optimized for storing data that
- is infrequently accessed and stored for at least a month. The archive
- tier is optimized for storing data that is rarely accessed and stored
- for at least six months with flexible latency requirements.
- :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier
- :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
- Indicates the priority with which to rehydrate an archived blob.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :rtype: None
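-
- A minimal inline sketch (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient`` for a block blob; tier names are passed
- as the plain strings the docstring documents):
-
- .. code-block:: python
-
-     # Archive the blob, then request a high-priority rehydration back to Hot.
-     await blob_client.set_standard_blob_tier("Archive")
-     await blob_client.set_standard_blob_tier("Hot", rehydrate_priority="High")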
- """
- access_conditions = get_access_conditions(kwargs.pop('lease', None))
- mod_conditions = get_modify_conditions(kwargs)
- version_id = get_version_id(self.version_id, kwargs)
- if standard_blob_tier is None:
- raise ValueError("A StandardBlobTier must be specified")
- try:
- await self._client.blob.set_tier(
- tier=standard_blob_tier,
- timeout=kwargs.pop('timeout', None),
- modified_access_conditions=mod_conditions,
- lease_access_conditions=access_conditions,
- version_id=version_id,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def stage_block(
- self, block_id: str,
- data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
- length: Optional[int] = None,
- **kwargs: Any
- ) -> Dict[str, Any]:
- """Creates a new block to be committed as part of a blob.
-
- :param str block_id: A string value that identifies the block.
- The string should be less than or equal to 64 bytes in size.
- For a given blob, the block_id must be the same size for each block.
- :param data: The blob data.
- :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
- :param int length: Size of the block.
- :keyword bool validate_content:
- If true, calculates an MD5 hash for each chunk of the blob. The storage
- service checks the hash of the content that has arrived with the hash
- that was sent. This is primarily valuable for detecting bitflips on
- the wire if using http instead of https, as https (the default) will
- already validate. Note that this MD5 hash is not stored with the
- blob. Also note that if enabled, the memory-efficient upload algorithm
- will not be used because computing the MD5 hash requires buffering
- entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword str encoding:
- Defaults to UTF-8.
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob property dict.
- :rtype: Dict[str, Any]
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _stage_block_options(
- block_id=block_id,
- data=data,
- length=length,
- **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.block_blob.stage_block(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def stage_block_from_url(
- self, block_id: str,
- source_url: str,
- source_offset: Optional[int] = None,
- source_length: Optional[int] = None,
- source_content_md5: Optional[Union[bytes, bytearray]] = None,
- **kwargs: Any
- ) -> Dict[str, Any]:
- """Creates a new block to be committed as part of a blob where
- the contents are read from a URL.
-
- :param str block_id: A string value that identifies the block.
- The string should be less than or equal to 64 bytes in size.
- For a given blob, the block_id must be the same size for each block.
- :param str source_url: The URL.
- :param int source_offset:
- Start of byte range to use for the block.
- Must be set if source length is provided.
- :param int source_length: The size of the block in bytes.
- :param bytearray source_content_md5:
- Specify the md5 calculated for the range of
- bytes that must be read from the copy source.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword str source_authorization:
- Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
- the prefix of the source_authorization string.
- :returns: Blob property dict.
- :rtype: Dict[str, Any]
- """
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _stage_block_from_url_options(
- block_id=block_id,
- source_url=source_url,
- source_offset=source_offset,
- source_length=source_length,
- source_content_md5=source_content_md5,
- **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.block_blob.stage_block_from_url(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def get_block_list(
- self, block_list_type: str = "committed",
- **kwargs: Any
- ) -> Tuple[List[BlobBlock], List[BlobBlock]]:
- """The Get Block List operation retrieves the list of blocks that have
- been uploaded as part of a block blob.
-
- :param str block_list_type:
- Specifies whether to return the list of committed
- blocks, the list of uncommitted blocks, or both lists together.
- Possible values include: 'committed', 'uncommitted', 'all'
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A tuple of two lists - committed and uncommitted blocks.
- :rtype: Tuple[List[BlobBlock], List[BlobBlock]]
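-
- A minimal inline sketch of the staging workflow this list reflects
- (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient``; ids and data are illustrative only):
-
- .. code-block:: python
-
-     import base64
-     from azure.storage.blob import BlobBlock
-
-     # Stage one block, inspect the uncommitted list, then commit it.
-     block_id = base64.b64encode(b"block-000001").decode()
-     await blob_client.stage_block(block_id, b"data chunk")
-     committed, uncommitted = await blob_client.get_block_list("all")
-     assert any(b.id == block_id for b in uncommitted)
-     await blob_client.commit_block_list([BlobBlock(block_id=block_id)])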
- """
- access_conditions = get_access_conditions(kwargs.pop('lease', None))
- mod_conditions = get_modify_conditions(kwargs)
- try:
- blocks = await self._client.block_blob.get_block_list(
- list_type=block_list_type,
- snapshot=self.snapshot,
- timeout=kwargs.pop('timeout', None),
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- return _get_block_list_result(blocks)
-
- @distributed_trace_async
- async def commit_block_list(
- self, block_list: List[BlobBlock],
- content_settings: Optional["ContentSettings"] = None,
- metadata: Optional[Dict[str, str]] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """The Commit Block List operation writes a blob by specifying the list of
- block IDs that make up the blob.
-
- :param list block_list:
- List of BlobBlock objects.
- :param ~azure.storage.blob.ContentSettings content_settings:
- ContentSettings object used to set blob properties. Used to set content type, encoding,
- language, disposition, md5, and cache control.
- :param metadata:
- Name-value pairs associated with the blob as metadata.
- :type metadata: dict[str, str]
- :keyword tags:
- Name-value pairs associated with the blob as tags. Tags are case-sensitive.
- The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
- and tag values must be between 0 and 256 characters.
- Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
-
- .. versionadded:: 12.4.0
-
- :paramtype tags: dict(str, str)
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
- Specifies the immutability policy of a blob, blob snapshot or blob version.
-
- .. versionadded:: 12.10.0
- This was introduced in API version '2020-10-02'.
-
- :keyword bool legal_hold:
- Specifies whether a legal hold should be set on the blob.
-
- .. versionadded:: 12.10.0
- This was introduced in API version '2020-10-02'.
-
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the page content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default)
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
- A standard blob tier value to set the blob to. For this version of the library,
- this is only applicable to block blobs on standard storage accounts.
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if self.require_encryption or (self.key_encryption_key is not None):
- raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _commit_block_list_options(
- block_list=block_list,
- content_settings=content_settings,
- metadata=metadata,
- **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.block_blob.commit_block_list(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None:
- """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts.
-
- :param premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :rtype: None
- """
- access_conditions = get_access_conditions(kwargs.pop('lease', None))
- mod_conditions = get_modify_conditions(kwargs)
- if premium_page_blob_tier is None:
- raise ValueError("A PremiumPageBlobTier must be specified")
- try:
- await self._client.blob.set_tier(
- tier=premium_page_blob_tier,
- timeout=kwargs.pop('timeout', None),
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]:
- """The Set Tags operation enables users to set tags on a blob or specific blob version, but not a snapshot.
- Each call to this operation replaces all existing tags attached to the blob. To remove all
- tags from the blob, call this operation with no tags set.
-
- .. versionadded:: 12.4.0
- This operation was introduced in API version '2019-12-12'.
-
- :param tags:
- Name-value pairs associated with the blob as tags. Tags are case-sensitive.
- The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
- and tag values must be between 0 and 256 characters.
- Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
- :type tags: dict(str, str)
- :keyword str version_id:
- The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to set tags on.
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the tags content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default)
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified)
- :rtype: Dict[str, Any]
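-
- A minimal inline sketch covering this and the matching ``get_blob_tags`` call
- below (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient``; the tag values are illustrative only):
-
- .. code-block:: python
-
-     # Replace the blob's entire tag set, then read it back.
-     await blob_client.set_blob_tags({"project": "demo", "stage": "raw"})
-     tags = await blob_client.get_blob_tags()
-     print(tags)  # {'project': 'demo', 'stage': 'raw'}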
- """
- version_id = get_version_id(self.version_id, kwargs)
- options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.blob.set_tags(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]:
- """The Get Tags operation enables users to get tags on a blob or specific blob version, but not a snapshot.
-
- .. versionadded:: 12.4.0
- This operation was introduced in API version '2019-12-12'.
-
- :keyword str version_id:
- The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to get tags from.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Key value pairs of blob tags.
- :rtype: Dict[str, str]
- """
- version_id = get_version_id(self.version_id, kwargs)
- options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
- try:
- _, tags = await self._client.blob.get_tags(**options)
- return cast(Dict[str, str], parse_tags(tags))  # pylint: disable=protected-access
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def get_page_ranges(
- self, offset: Optional[int] = None,
- length: Optional[int] = None,
- previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
- **kwargs: Any
- ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
- """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot
- of a page blob.
-
- :param int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :param int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :param str previous_snapshot_diff:
- The snapshot diff parameter that contains an opaque DateTime value that
- specifies a previous blob snapshot to be compared
- against a more recent snapshot or the current blob.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns:
- A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
- The first element is the list of filled page ranges, the second element is the list of cleared page ranges.
- :rtype: tuple(list(dict(str, str)), list(dict(str, str)))
- """
- warnings.warn(
- "get_page_ranges is deprecated, use list_page_ranges instead",
- DeprecationWarning
- )
-
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- previous_snapshot_diff=previous_snapshot_diff,
- **kwargs)
- try:
- if previous_snapshot_diff:
- ranges = await self._client.page_blob.get_page_ranges_diff(**options)
- else:
- ranges = await self._client.page_blob.get_page_ranges(**options)
- except HttpResponseError as error:
- process_storage_error(error)
- return get_page_ranges_result(ranges)
-
- @distributed_trace
- def list_page_ranges(
- self,
- *,
- offset: Optional[int] = None,
- length: Optional[int] = None,
- previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None,
- **kwargs: Any
- ) -> AsyncItemPaged[PageRange]:
- """Returns the list of valid page ranges for a Page Blob or snapshot
- of a page blob. If `previous_snapshot` is specified, the result will be
- a diff of changes between the target blob and the previous snapshot.
-
- :keyword int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :keyword int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :keyword previous_snapshot:
- A snapshot value that specifies that the response will contain only pages that were changed
- between target blob and previous snapshot.
Changed pages include both updated and cleared
- pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot`
- is the older of the two.
- :paramtype previous_snapshot: str or Dict[str, Any]
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int results_per_page:
- The maximum number of page ranges to retrieve per API call.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) of PageRange.
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.PageRange]
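-
- A minimal inline sketch (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient`` pointing at a page blob):
-
- .. code-block:: python
-
-     # Page ranges are paged on the service side; iterate them lazily.
-     async for page_range in blob_client.list_page_ranges():
-         print(page_range.start, page_range.end)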
- """
- results_per_page = kwargs.pop('results_per_page', None)
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- previous_snapshot_diff=previous_snapshot,
- **kwargs)
-
- if previous_snapshot:
- command = partial(
- self._client.page_blob.get_page_ranges_diff,
- **options)
- else:
- command = partial(
- self._client.page_blob.get_page_ranges,
- **options)
- return AsyncItemPaged(
- command, results_per_page=results_per_page,
- page_iterator_class=PageRangePaged)
-
- @distributed_trace_async
- async def get_page_range_diff_for_managed_disk(
- self, previous_snapshot_url: str,
- offset: Optional[int] = None,
- length: Optional[int] = None,
- **kwargs: Any
- ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
- """Returns the list of valid page ranges for a managed disk or snapshot.
-
- .. note::
- This operation is only available for managed disk accounts.
-
- .. versionadded:: 12.2.0
- This operation was introduced in API version '2019-07-07'.
-
- :param str previous_snapshot_url:
- Specifies the URL of a previous snapshot of the managed disk.
- The response will only contain pages that were changed between the target blob and
- its previous snapshot.
- :param int offset:
- Start of byte range to use for getting valid page ranges.
- If no length is given, all bytes after the offset will be searched.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :param int length:
- Number of bytes to use for getting valid page ranges.
- If length is given, offset must be provided.
- This range will return valid page ranges from the offset start up to
- the specified length.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns:
- A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
- The first element is the list of filled page ranges, the second element is the list of cleared page ranges.
- :rtype: tuple(list(dict(str, str)), list(dict(str, str)))
- """
- options = _get_page_ranges_options(
- snapshot=self.snapshot,
- offset=offset,
- length=length,
- prev_snapshot_url=previous_snapshot_url,
- **kwargs)
- try:
- ranges = await self._client.page_blob.get_page_ranges_diff(**options)
- except HttpResponseError as error:
- process_storage_error(error)
- return get_page_ranges_result(ranges)
-
- @distributed_trace_async
- async def set_sequence_number(
- self, sequence_number_action: Union[str, "SequenceNumberAction"],
- sequence_number: Optional[str] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """Sets the blob sequence number.
-
- :param str sequence_number_action:
- This property indicates how the service should modify the blob's sequence
- number. See :class:`~azure.storage.blob.SequenceNumberAction` for more information.
- :param str sequence_number:
- This property sets the blob's sequence number. The sequence number is a
- user-controlled property that you can use to track requests and manage
- concurrency issues.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.page_blob.update_sequence_number(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
- """Resizes a page blob to the specified size.
-
- If the specified value is less than the current size of the blob,
- then all pages above the specified value are cleared.
-
- :param int size:
- Size used to resize blob. Maximum size for a page blob is up to 1 TB.
- The page blob size must be aligned to a 512-byte boundary.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*).
Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
- """
- if kwargs.get('cpk') and self.scheme.lower() != 'https':
- raise ValueError("Customer provided encryption key must be used over HTTPS.")
- options = _resize_blob_options(size=size, **kwargs)
- try:
- return cast(Dict[str, Any], await self._client.page_blob.resize(**options))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def upload_page(
- self, page: bytes,
- offset: int,
- length: int,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """The Upload Pages operation writes a range of pages to a page blob.
-
- :param bytes page:
- Content of the page.
- :param int offset:
- Start of byte range to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :param int length:
- Number of bytes to use for writing to a section of the blob.
- Pages must be aligned with 512-byte boundaries, the start offset
- must be a multiple of 512 and the length must be a multiple of
- 512.
- :keyword lease:
- Required if the blob has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword bool validate_content:
- If true, calculates an MD5 hash of the page content. The storage
- service checks the hash of the content that has arrived
- with the hash that was sent. This is primarily valuable for detecting
- bitflips on the wire if using http instead of https, as https (the default)
- will already validate. Note that this MD5 hash is not stored with the
- blob.
- :keyword int if_sequence_number_lte:
- If the blob's sequence number is less than or equal to
- the specified value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_lt:
- If the blob's sequence number is less than the specified
- value, the request proceeds; otherwise it fails.
- :keyword int if_sequence_number_eq:
- If the blob's sequence number is equal to the specified
- value, the request proceeds; otherwise it fails.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword str encoding:
- Defaults to UTF-8.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Blob-updated property dict (Etag and last modified).
- :rtype: dict(str, Any)
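-
- A minimal inline sketch (``blob_client`` is assumed to be an authenticated
- ``azure.storage.blob.aio.BlobClient`` for an existing page blob):
-
- .. code-block:: python
-
-     # Write a single 512-byte page at the start of the blob; offsets
-     # and lengths must stay 512-byte aligned.
-     data = b"\xff" * 512
-     await blob_client.upload_page(data, offset=0, length=512)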
- Pages must be aligned with 512-byte boundaries, the start offset - must be a modulus of 512 and the length must be a modulus of - 512. - :param int source_offset: - This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. - The service will read the same number of bytes as the destination range (length-offset). - :keyword bytes source_content_md5: - If given, the service will calculate the MD5 hash of the block content and compare against this value. - :keyword ~datetime.datetime source_if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the source resource has been modified since the specified time. - :keyword ~datetime.datetime source_if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the source resource has not been modified since the specified date/time. - :keyword str source_etag: - The source ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions source_match_condition: - The source match condition to use upon the etag. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword int if_sequence_number_lte: - If the blob's sequence number is less than or equal to - the specified value, the request proceeds; otherwise it fails. - :keyword int if_sequence_number_lt: - If the blob's sequence number is less than the specified - value, the request proceeds; otherwise it fails. - :keyword int if_sequence_number_eq: - If the blob's sequence number is equal to the specified - value, the request proceeds; otherwise it fails. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The destination match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. 
versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. - :returns: Response after uploading pages from specified URL. - :rtype: Dict[str, Any] - """ - - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _upload_pages_from_url_options( - source_url=source_url, - offset=offset, - length=length, - source_offset=source_offset, - **kwargs - ) - try: - return cast(Dict[str, Any], await self._client.page_blob.upload_pages_from_url(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: - """Clears a range of pages. - - :param int offset: - Start of byte range to use for writing to a section of the blob. - Pages must be aligned with 512-byte boundaries, the start offset - must be a modulus of 512 and the length must be a modulus of - 512. - :param int length: - Number of bytes to use for writing to a section of the blob. - Pages must be aligned with 512-byte boundaries, the start offset - must be a modulus of 512 and the length must be a modulus of - 512. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword int if_sequence_number_lte: - If the blob's sequence number is less than or equal to - the specified value, the request proceeds; otherwise it fails. - :keyword int if_sequence_number_lt: - If the blob's sequence number is less than the specified - value, the request proceeds; otherwise it fails. - :keyword int if_sequence_number_eq: - If the blob's sequence number is equal to the specified - value, the request proceeds; otherwise it fails. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. 
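A sketch of the server-side variant documented above: the destination range and the source range have the same 512-aligned length, and the source must be publicly readable or carry a SAS. The client variable and URL are illustrative:

    async def copy_first_page(dest, source_url: str) -> None:
        # dest: an azure.storage.blob.aio.BlobClient for an existing page blob (assumed).
        # Reads bytes [0, 512) of source_url into bytes [0, 512) of dest; offset,
        # length, and source_offset all follow the 512-byte alignment rules above.
        await dest.upload_pages_from_url(source_url, offset=0, length=512, source_offset=0)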
- Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict(str, Any) - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _clear_page_options( - offset=offset, - length=length, - **kwargs - ) - try: - return cast(Dict[str, Any], await self._client.page_blob.clear_pages(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def append_block( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], - length: Optional[int] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime, int]]: - """Commits a new block of data to the end of the existing append blob. - - :param data: - Content of the block. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] - :param int length: - Size of the block in bytes. - :keyword bool validate_content: - If true, calculates an MD5 hash of the block content. The storage - service checks the hash of the content that has arrived - with the hash that was sent. This is primarily valuable for detecting - bitflips on the wire if using http instead of https, as https (the default), - will already validate. Note that this MD5 hash is not stored with the - blob. - :keyword int maxsize_condition: - Optional conditional header. The max length in bytes permitted for - the append blob. If the Append Block operation would cause the blob - to exceed that limit or if the blob size is already greater than the - value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). 
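The clear_page implementation just shown sends no data at all; it marks a 512-aligned range as empty. A one-function sketch, with the client variable assumed to exist:

    async def zero_first_page(blob) -> None:
        # blob: an azure.storage.blob.aio.BlobClient for an existing page blob (assumed).
        # Clearing uploads nothing; the service resets the range to zeros.
        await blob.clear_page(offset=0, length=512)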
- :keyword int appendpos_condition: - Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will - succeed only if the append position is equal to this number. If it - is not, the request will fail with the AppendPositionConditionNotMet error - (HTTP status code 412 - Precondition Failed). - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword str encoding: - Defaults to UTF-8. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). 
- :rtype: dict(str, Any) - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _append_block_options( - data=data, - length=length, - **kwargs - ) - try: - return cast(Dict[str, Any], await self._client.append_blob.append_block(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def append_block_from_url( - self, copy_source_url: str, - source_offset: Optional[int] = None, - source_length: Optional[int] = None, - **kwargs: Any - ) -> Dict[str, Union[str, datetime, int]]: - """ - Creates a new block to be committed as part of a blob, where the contents are read from a source url. - - :param str copy_source_url: - The URL of the source data. It can point to any Azure Blob or File, that is either public or has a - shared access signature attached. - :param int source_offset: - This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. - :param int source_length: - This indicates the end of the range of bytes that has to be taken from the copy source. - :keyword bytearray source_content_md5: - If given, the service will calculate the MD5 hash of the block content and compare against this value. - :keyword int maxsize_condition: - Optional conditional header. The max length in bytes permitted for - the append blob. If the Append Block operation would cause the blob - to exceed that limit or if the blob size is already greater than the - value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :keyword int appendpos_condition: - Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will - succeed only if the append position is equal to this number. If it - is not, the request will fail with the - AppendPositionConditionNotMet error - (HTTP status code 412 - Precondition Failed). - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The destination match condition to use upon the etag. 
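The append path is simpler than the page path: there are no alignment rules, and each block is committed atomically at the current end of the blob. A sketch with hypothetical contents; the client variable is assumed:

    async def append_log_lines(blob) -> None:
        # blob: an azure.storage.blob.aio.BlobClient (assumed); must be an append blob.
        await blob.create_append_blob()
        await blob.append_block(b"first entry\n")
        # maxsize_condition caps growth: the request fails with HTTP 412 once the
        # blob would exceed the stated limit (1 MiB here).
        await blob.append_block(b"second entry\n", maxsize_condition=1024 * 1024)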
- :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~datetime.datetime source_if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the source resource has been modified since the specified time. - :keyword ~datetime.datetime source_if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the source resource has not been modified since the specified date/time. - :keyword str source_etag: - The source ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions source_match_condition: - The source match condition to use upon the etag. - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.2.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. - :returns: Result after appending a new block. - :rtype: Dict[str, Union[str, datetime, int]] - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _append_block_from_url_options( - copy_source_url=copy_source_url, - source_offset=source_offset, - source_length=source_length, - **kwargs - ) - try: - return cast(Dict[str, Union[str, datetime, int]], - await self._client.append_blob.append_block_from_url(**options)) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]: - """The Seal operation seals the Append Blob to make it read-only. - - .. 
versionadded:: 12.4.0 - - :keyword int appendpos_condition: - Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will - succeed only if the append position is equal to this number. If it - is not, the request will fail with the AppendPositionConditionNotMet error - (HTTP status code 412 - Precondition Failed). - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). - :rtype: dict(str, Any) - """ - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - options = _seal_append_blob_options(**kwargs) - try: - return cast(Dict[str, Any], await self._client.append_blob.seal(**options)) - except HttpResponseError as error: - process_storage_error(error) - - def _get_container_client(self) -> "ContainerClient": - """Get a client to interact with the blob's parent container. - - The container need not already exist. Defaults to current blob's credentials. - - :returns: A ContainerClient. - :rtype: ~azure.storage.blob.ContainerClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers_async.py - :start-after: [START get_container_client_from_blob_client] - :end-before: [END get_container_client_from_blob_client] - :language: python - :dedent: 12 - :caption: Get container client from blob object. 
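Sealing, sketched under the same assumptions: seal_append_blob makes the append blob read-only, so any later append_block fails server-side:

    from azure.core.exceptions import HttpResponseError

    async def seal(blob) -> None:
        # blob: an azure.storage.blob.aio.BlobClient for an existing append blob (assumed).
        await blob.seal_append_blob()
        try:
            await blob.append_block(b"no longer allowed\n")
        except HttpResponseError:
            pass  # sealed blobs reject further appends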
- """ - from ._container_client_async import ContainerClient - if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access - _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=cast(Iterable["AsyncHTTPPolicy"], - self._pipeline._impl_policies) # pylint: disable = protected-access - ) - else: - _pipeline = self._pipeline # pylint: disable = protected-access - return ContainerClient( - f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name, - credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py deleted file mode 100644 index b3ce734e58cf..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py +++ /dev/null @@ -1,795 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method, docstring-keyword-should-match-keyword-only - -import functools -import warnings -from typing import ( - Any, cast, Dict, Iterable, List, Optional, Union, - TYPE_CHECKING -) -from typing_extensions import Self - -from azure.core.async_paging import AsyncItemPaged -from azure.core.exceptions import HttpResponseError -from azure.core.pipeline import AsyncPipeline -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async - -from ._blob_client_async import BlobClient -from ._container_client_async import ContainerClient -from ._models import ContainerPropertiesPaged, FilteredBlobPaged -from .._blob_service_client_helpers import _parse_url -from .._deserialize import service_properties_deserialize, service_stats_deserialize -from .._encryption import StorageEncryptionMixin -from .._generated.aio import AzureBlobStorage -from .._generated.models import StorageServiceProperties, KeyInfo -from .._models import BlobProperties, ContainerProperties, CorsRule -from .._serialize import get_api_version -from .._shared.base_client import parse_query, StorageAccountHostsMixin -from .._shared.base_client_async import parse_connection_str -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper -from .._shared.response_handlers import ( - parse_to_internal_user_delegation_key, - process_storage_error, - return_response_headers, -) -from .._shared.models import LocationMode -from .._shared.parser import _to_utc_datetime -from .._shared.policies_async import ExponentialRetry - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential - from azure.core.credentials_async import AsyncTokenCredential - from azure.core.pipeline.policies import AsyncHTTPPolicy - from datetime import datetime - from ._lease_async 
import BlobLeaseClient - from .._models import ( - BlobAnalyticsLogging, - FilteredBlob, - Metrics, - PublicAccess, - RetentionPolicy, - StaticWebsite - ) - from .._shared.models import UserDelegationKey - - -class BlobServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=line-too-long - """A client to interact with the Blob Service at the account level. - - This client provides operations to retrieve and configure the account properties - as well as list, create and delete containers within the account. - For operations relating to a specific container or blob, clients for those entities - can also be retrieved using the `get_client` functions. - - :param str account_url: - The URL to the blob storage account. Any other entities included - in the URL path (e.g. container or blob) will be discarded. This URL can be optionally - authenticated with a SAS token. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :keyword str api_version: - The Storage API version to use for requests. Default value is the most recent service version that is - compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. - - .. versionadded:: 12.2.0 - - :keyword str secondary_hostname: - The hostname of the secondary endpoint. - :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. - Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be - uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, - the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. - :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient - algorithm when uploading a block blob. Defaults to 4*1024*1024+1. - :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. - :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, - the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. - :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, - or 4MB. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://.blob.core.windows.net. - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/blob_samples_authentication_async.py - :start-after: [START create_blob_service_client] - :end-before: [END create_blob_service_client] - :language: python - :dedent: 8 - :caption: Creating the BlobServiceClient with account url and credential. - - .. literalinclude:: ../samples/blob_samples_authentication_async.py - :start-after: [START create_blob_service_client_oauth] - :end-before: [END create_blob_service_client_oauth] - :language: python - :dedent: 8 - :caption: Creating the BlobServiceClient with Azure Identity credentials. - """ - - def __init__( - self, account_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - parsed_url, sas_token = _parse_url(account_url=account_url) - _, sas_token = parse_query(parsed_url.query) - self._query_str, credential = self._format_query_string(sas_token, credential) - super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] # pylint: disable=protected-access - self._configure_encryption(kwargs) - - def _format_url(self, hostname): - """Format the endpoint URL according to the current location - mode hostname. - - :param str hostname: - The hostname of the current location mode. - :returns: A formatted endpoint URL including current location mode hostname. - :rtype: str - """ - return f"{self.scheme}://{hostname}/{self._query_str}" - - @classmethod - def from_connection_string( - cls, conn_str: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> Self: - """Create BlobServiceClient from a Connection String. - - :param str conn_str: - A connection string to an Azure Storage account. - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - Credentials provided here will take precedence over those in the connection string. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials_async.AsyncTokenCredential or - str or dict[str, str] or None - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://.blob.core.windows.net. - :returns: A Blob service client. - :rtype: ~azure.storage.blob.BlobServiceClient - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/blob_samples_authentication.py - :start-after: [START auth_from_connection_string] - :end-before: [END auth_from_connection_string] - :language: python - :dedent: 8 - :caption: Creating the BlobServiceClient from a connection string. - """ - account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls(account_url, credential=credential, **kwargs) - - @distributed_trace_async - async def get_user_delegation_key( - self, key_start_time: "datetime", - key_expiry_time: "datetime", - **kwargs: Any - ) -> "UserDelegationKey": - """ - Obtain a user delegation key for the purpose of signing SAS tokens. - A token credential must be present on the service object for this request to succeed. - - :param ~datetime.datetime key_start_time: - A DateTime value. Indicates when the key becomes valid. - :param ~datetime.datetime key_expiry_time: - A DateTime value. Indicates when the key stops being valid. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: The user delegation key. - :rtype: ~azure.storage.blob.UserDelegationKey - """ - key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) - timeout = kwargs.pop('timeout', None) - try: - user_delegation_key = await self._client.service.get_user_delegation_key(key_info=key_info, - timeout=timeout, - **kwargs) # type: ignore - except HttpResponseError as error: - process_storage_error(error) - - return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore - - @distributed_trace_async - async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: - """Gets information related to the storage account. - - The information can also be retrieved if the user has a SAS to a container or blob. - The keys in the returned dictionary include 'sku_name' and 'account_kind'. - - :returns: A dict of account information (SKU and account type). - :rtype: dict(str, str) - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START get_blob_service_account_info] - :end-before: [END get_blob_service_account_info] - :language: python - :dedent: 12 - :caption: Getting account information for the blob service. - """ - try: - return await self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: - """Retrieves statistics related to replication for the Blob service. - - It is only available when read-access geo-redundant replication is enabled for - the storage account. - - With geo-redundant replication, Azure Storage maintains your data durable - in two locations. In both locations, Azure Storage constantly maintains - multiple healthy replicas of your data. The location where you read, - create, update, or delete data is the primary storage account location. - The primary location exists in the region you choose at the time you - create an account via the Azure Management Azure classic portal, for - example, North Central US. 
The location to which your data is replicated - is the secondary location. The secondary location is automatically - determined based on the location of the primary; it is in a second data - center that resides in the same region as the primary location. Read-only - access is available from the secondary location, if read-access geo-redundant - replication is enabled for your storage account. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: The blob service stats. - :rtype: Dict[str, Any] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START get_blob_service_stats] - :end-before: [END get_blob_service_stats] - :language: python - :dedent: 12 - :caption: Getting service stats for the blob service. - """ - timeout = kwargs.pop('timeout', None) - try: - stats = await self._client.service.get_statistics( # type: ignore - timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) - return service_stats_deserialize(stats) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: - """Gets the properties of a storage account's Blob service, including - Azure Storage Analytics. - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An object containing blob service properties such as - analytics logging, hour/minute metrics, cors rules, etc. - :rtype: Dict[str, Any] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START get_blob_service_properties] - :end-before: [END get_blob_service_properties] - :language: python - :dedent: 12 - :caption: Getting service properties for the blob service. - """ - timeout = kwargs.pop('timeout', None) - try: - service_props = await self._client.service.get_properties(timeout=timeout, **kwargs) - return service_properties_deserialize(service_props) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def set_service_properties( - self, analytics_logging: Optional["BlobAnalyticsLogging"] = None, - hour_metrics: Optional["Metrics"] = None, - minute_metrics: Optional["Metrics"] = None, - cors: Optional[List[CorsRule]] = None, - target_version: Optional[str] = None, - delete_retention_policy: Optional["RetentionPolicy"] = None, - static_website: Optional["StaticWebsite"] = None, - **kwargs: Any - ) -> None: - """Sets the properties of a storage account's Blob service, including - Azure Storage Analytics. - - If an element (e.g. analytics_logging) is left as None, the - existing settings on the service for that functionality are preserved. - - :param analytics_logging: - Groups the Azure Analytics Logging settings. - :type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging - :param hour_metrics: - The hour metrics settings provide a summary of request - statistics grouped by API in hourly aggregates for blobs. 
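The "None means preserve" contract described above matters in practice: to turn on soft delete you pass only the retention policy and leave logging, metrics, and CORS untouched. A sketch, assuming `service` is an aio BlobServiceClient and the seven-day window is illustrative:

    from azure.storage.blob import RetentionPolicy

    async def enable_soft_delete(service) -> None:
        # Only delete_retention_policy is sent; all other service settings are preserved.
        await service.set_service_properties(
            delete_retention_policy=RetentionPolicy(enabled=True, days=7))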
- :type hour_metrics: ~azure.storage.blob.Metrics - :param minute_metrics: - The minute metrics settings provide request statistics - for each minute for blobs. - :type minute_metrics: ~azure.storage.blob.Metrics - :param cors: - You can include up to five CorsRule elements in the - list. If an empty list is specified, all CORS rules will be deleted, - and CORS will be disabled for the service. - :type cors: list[~azure.storage.blob.CorsRule] - :param str target_version: - Indicates the default version to use for requests if an incoming - request's version is not specified. - :param delete_retention_policy: - The delete retention policy specifies whether to retain deleted blobs. - It also specifies the number of days and versions of blob to keep. - :type delete_retention_policy: ~azure.storage.blob.RetentionPolicy - :param static_website: - Specifies whether the static website feature is enabled, - and if yes, indicates the index document and 404 error document to use. - :type static_website: ~azure.storage.blob.StaticWebsite - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START set_blob_service_properties] - :end-before: [END set_blob_service_properties] - :language: python - :dedent: 12 - :caption: Setting service properties for the blob service. - """ - if all(parameter is None for parameter in [ - analytics_logging, hour_metrics, minute_metrics, cors, - target_version, delete_retention_policy, static_website]): - raise ValueError("set_service_properties should be called with at least one parameter") - - props = StorageServiceProperties( - logging=analytics_logging, - hour_metrics=hour_metrics, - minute_metrics=minute_metrics, - cors=CorsRule._to_generated(cors), # pylint: disable=protected-access - default_service_version=target_version, - delete_retention_policy=delete_retention_policy, - static_website=static_website - ) - timeout = kwargs.pop('timeout', None) - try: - await self._client.service.set_properties(props, timeout=timeout, **kwargs) - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace - def list_containers( - self, name_starts_with: Optional[str] = None, - include_metadata: bool = False, - **kwargs: Any - ) -> AsyncItemPaged[ContainerProperties]: - """Returns a generator to list the containers under the specified account. - - The generator will lazily follow the continuation tokens returned by - the service and stop when all containers have been returned. - - :param str name_starts_with: - Filters the results to return only containers whose names - begin with the specified prefix. - :param bool include_metadata: - Specifies that container metadata to be returned in the response. - The default value is `False`. - :keyword bool include_deleted: - Specifies that deleted containers to be returned in the response. This is for container restore enabled - account. The default value is `False`. - .. versionadded:: 12.4.0 - :keyword bool include_system: - Flag specifying that system containers should be included. - .. versionadded:: 12.10.0 - :keyword int results_per_page: - The maximum number of container names to retrieve per API - call. 
If the request does not specify the server will return up to 5,000 items. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) of ContainerProperties. - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.ContainerProperties] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START bsc_list_containers] - :end-before: [END bsc_list_containers] - :language: python - :dedent: 16 - :caption: Listing the containers in the blob service. - """ - include = ['metadata'] if include_metadata else [] - include_deleted = kwargs.pop('include_deleted', None) - if include_deleted: - include.append("deleted") - include_system = kwargs.pop('include_system', None) - if include_system: - include.append("system") - timeout = kwargs.pop('timeout', None) - results_per_page = kwargs.pop('results_per_page', None) - command = functools.partial( - self._client.service.list_containers_segment, - prefix=name_starts_with, - include=include, - timeout=timeout, - **kwargs) - return AsyncItemPaged( - command, - prefix=name_starts_with, - results_per_page=results_per_page, - page_iterator_class=ContainerPropertiesPaged - ) - - @distributed_trace - def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> AsyncItemPaged["FilteredBlob"]: - """The Filter Blobs operation enables callers to list blobs across all - containers whose tags match a given search expression. Filter blobs - searches across all containers within a storage account but can be - scoped within the expression to a single container. - - :param str filter_expression: - The expression to find blobs whose tags matches the specified condition. - eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" - To specify a container, eg. "@container='containerName' and \"Name\"='C'" - :keyword int results_per_page: - The max result per page when paginating. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: An iterable (auto-paging) response of BlobProperties. - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob] - """ - - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) - command = functools.partial( - self._client.service.filter_blobs, - where=filter_expression, - timeout=timeout, - **kwargs) - return AsyncItemPaged( - command, results_per_page=results_per_page, - page_iterator_class=FilteredBlobPaged) - - @distributed_trace_async - async def create_container( - self, name: str, - metadata: Optional[Dict[str, str]] = None, - public_access: Optional[Union["PublicAccess", str]] = None, - **kwargs: Any - ) -> ContainerClient: - """Creates a new container under the specified account. - - If the container with the same name already exists, a ResourceExistsError will - be raised. This method returns a client with which to interact with the newly - created container. 
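Both pagers above return AsyncItemPaged, so consumption is a plain `async for` and continuation tokens are followed lazily. A sketch with hypothetical prefix and tag values:

    async def survey(service) -> None:
        # service: an azure.storage.blob.aio.BlobServiceClient (assumed).
        async for container in service.list_containers(name_starts_with="logs-", include_metadata=True):
            print(container.name, container.metadata)
        # The tag filter uses the SQL-like grammar shown in the docstring above.
        async for blob in service.find_blobs_by_tags("\"project\"='alpha'"):
            print(blob.container_name, blob.name)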
- - :param str name: The name of the container to create. - :param metadata: - A dict with name-value pairs to associate with the - container as metadata. Example: `{'Category':'test'}` - :type metadata: dict(str, str) - :param public_access: - Possible values include: 'container', 'blob'. - :type public_access: str or ~azure.storage.blob.PublicAccess - :keyword container_encryption_scope: - Specifies the default encryption scope to set on the container and use for - all future writes. - - .. versionadded:: 12.2.0 - - :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: A container client to interact with the newly created container. - :rtype: ~azure.storage.blob.aio.ContainerClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START bsc_create_container] - :end-before: [END bsc_create_container] - :language: python - :dedent: 16 - :caption: Creating a container in the blob service. - """ - container = self.get_container_client(name) - timeout = kwargs.pop('timeout', None) - kwargs.setdefault('merge_span', True) - await container.create_container( - metadata=metadata, public_access=public_access, timeout=timeout, **kwargs) - return container - - @distributed_trace_async - async def delete_container( - self, container: Union[ContainerProperties, str], - lease: Optional[Union["BlobLeaseClient", str]] = None, - **kwargs: Any - ) -> None: - """Marks the specified container for deletion. - - The container and any blobs contained within it are later deleted during garbage collection. - If the container is not found, a ResourceNotFoundError will be raised. - - :param container: - The container to delete. This can either be the name of the container, - or an instance of ContainerProperties. - :type container: str or ~azure.storage.blob.ContainerProperties - :param lease: - If specified, delete_container only succeeds if the - container's lease is active and matches this ID. - Required if the container has an active lease. - :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. 
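A create-then-delete round trip under the semantics above: create_container raises ResourceExistsError on a name collision and hands back a ready-to-use ContainerClient, while delete only marks the container for later garbage collection. The container name is hypothetical:

    from azure.core.exceptions import ResourceExistsError

    async def recreate(service) -> None:
        try:
            container = await service.create_container("nightly-builds")
        except ResourceExistsError:
            container = service.get_container_client("nightly-builds")
        # ... work with container ...
        await service.delete_container("nightly-builds")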
- :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START bsc_delete_container] - :end-before: [END bsc_delete_container] - :language: python - :dedent: 16 - :caption: Deleting a container in the blob service. - """ - container_client = self.get_container_client(container) - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - await container_client.delete_container( - lease=lease, - timeout=timeout, - **kwargs) - - @distributed_trace_async - async def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: - """Renames a container. - - Operation is successful only if the source container exists. - - :param str name: - The name of the container to rename. - :param str new_name: - The new container name the user wants to rename to. - :keyword lease: - Specify this to perform only if the lease ID given - matches the active lease ID of the source container. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: A container client for the renamed container. - :rtype: ~azure.storage.blob.ContainerClient - """ - renamed_container = self.get_container_client(new_name) - lease = kwargs.pop('lease', None) - try: - kwargs['source_lease_id'] = lease.id - except AttributeError: - kwargs['source_lease_id'] = lease - try: - await renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access - return renamed_container - except HttpResponseError as error: - process_storage_error(error) - - @distributed_trace_async - async def undelete_container( - self, deleted_container_name: str, - deleted_container_version: str, - **kwargs: Any - ) -> ContainerClient: - """Restores soft-deleted container. - - Operation will only be successful if used within the specified number of days - set in the delete retention policy. - - .. versionadded:: 12.4.0 - This operation was introduced in API version '2019-12-12'. - - :param str deleted_container_name: - Specifies the name of the deleted container to restore. - :param str deleted_container_version: - Specifies the version of the deleted container to restore. - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :returns: The recovered soft-deleted ContainerClient. 
- :rtype: ~azure.storage.blob.aio.ContainerClient - """ - new_name = kwargs.pop('new_name', None) - if new_name: - warnings.warn("`new_name` is no longer supported.", DeprecationWarning) - container = self.get_container_client(new_name or deleted_container_name) - try: - await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access - deleted_container_version=deleted_container_version, - timeout=kwargs.pop('timeout', None), **kwargs) - return container - except HttpResponseError as error: - process_storage_error(error) - - def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient: - """Get a client to interact with the specified container. - - The container need not already exist. - - :param container: - The container. This can either be the name of the container, - or an instance of ContainerProperties. - :type container: str or ~azure.storage.blob.ContainerProperties - :returns: A ContainerClient. - :rtype: ~azure.storage.blob.aio.ContainerClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START bsc_get_container_client] - :end-before: [END bsc_get_container_client] - :language: python - :dedent: 12 - :caption: Getting the container client to interact with a specific container. - """ - if isinstance(container, ContainerProperties): - container_name = container.name - else: - container_name = container - _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access - ) - return ContainerClient( - self.url, container_name=container_name, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) - - def get_blob_client( - self, container: Union[ContainerProperties, str], - blob: str, - snapshot: Optional[Union[Dict[str, Any], str]] = None, - *, - version_id: Optional[str] = None - ) -> BlobClient: - """Get a client to interact with the specified blob. - - The blob need not already exist. - - :param container: - The container that the blob is in. This can either be the name of the container, - or an instance of ContainerProperties. - :type container: str or ~azure.storage.blob.ContainerProperties - :param str blob: - The blob with which to interact. - :param snapshot: - The optional blob snapshot on which to operate. This can either be the ID of the snapshot, - or a dictionary output returned by - :func:`~azure.storage.blob.aio.BlobClient.create_snapshot()`. - :type snapshot: str or dict(str, Any) - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :returns: A BlobClient. - :rtype: ~azure.storage.blob.aio.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_service_async.py - :start-after: [START bsc_get_blob_client] - :end-before: [END bsc_get_blob_client] - :language: python - :dedent: 16 - :caption: Getting the blob client to interact with a specific blob. 
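get_blob_client is purely local: it builds a BlobClient that shares the service client's pipeline and credential without making a network call; the first request happens when an operation is awaited. Sketch with hypothetical names:

    async def head_blob(service) -> None:
        blob = service.get_blob_client("nightly-builds", "2024/report.csv")
        props = await blob.get_blob_properties()  # first actual request
        print(props.size, props.etag)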
- """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_name = blob.name - else: - blob_name = blob - if isinstance(container, ContainerProperties): - container_name = container.name - else: - container_name = container - _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=cast(Iterable["AsyncHTTPPolicy"], - self._pipeline._impl_policies) # pylint: disable = protected-access - ) - return BlobClient( - self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, - version_id=version_id) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_client.py similarity index 74% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/aio/_client.py index c76a291f3c6a..77dd210004bd 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_client.py @@ -2,21 +2,21 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable +from typing import Any, Awaitable, TYPE_CHECKING, Union from typing_extensions import Self from azure.core import AsyncPipelineClient +from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest -from .. import models as _models from .._serialization import Deserializer, Serializer -from ._configuration import AzureBlobStorageConfiguration +from ._configuration import BlobClientConfiguration from .operations import ( AppendBlobOperations, BlobOperations, @@ -26,9 +26,13 @@ ServiceOperations, ) +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential -class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword - """AzureBlobStorage. + +class BlobClient: # pylint: disable=client-accepts-api-version-keyword + """BlobClient. 
:ivar service: ServiceOperations operations :vartype service: azure.storage.blob.aio.operations.ServiceOperations @@ -42,20 +46,20 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :vartype append_blob: azure.storage.blob.aio.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.aio.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the target of the - desired operation. Required. - :type url: str - :param base_url: Service URL. Required. Default value is "". - :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2024-08-04". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param endpoint: The host name of the blob storage account, e.g. + accountName.blob.core.windows.net. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a + AzureKeyCredential type or a TokenCredential type. Required. + :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials_async.AsyncTokenCredential """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any + def __init__( + self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any ) -> None: - self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + _endpoint = "{endpoint}" + self._config = BlobClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -73,11 +77,10 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) @@ -86,7 +89,7 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request( + def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. 
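For orientation, a hedged sketch of exercising the now-public send_request on the renamed client; the endpoint is a placeholder, DefaultAzureCredential comes from the separate azure-identity package, and the "{endpoint}" template in the URL is filled in by the client as shown in the hunk below:

from azure.core.rest import HttpRequest
from azure.identity.aio import DefaultAzureCredential

async def probe(client):
    # `client` is assumed to be a BlobClient constructed as above, e.g.
    # BlobClient("<account>.blob.core.windows.net", DefaultAzureCredential()).
    response = await client.send_request(HttpRequest("GET", "{endpoint}?comp=list"))
    return response.status_code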
@@ -94,7 +97,7 @@ def _send_request( >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = await client._send_request(request) + >>> response = await client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -107,7 +110,11 @@ def _send_request( """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_configuration.py new file mode 100644 index 000000000000..a47f49865802 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_configuration.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING, Union + +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class BlobClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BlobClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: The host name of the blob storage account, e.g. + accountName.blob.core.windows.net. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a + AzureKeyCredential type or a TokenCredential type. Required. 
+ :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials_async.AsyncTokenCredential + """ + + def __init__( + self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any + ) -> None: + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "storage-blob/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _infer_policy(self, **kwargs): + if isinstance(self.credential, AzureKeyCredential): + return policies.AzureKeyCredentialPolicy(self.credential, "api-key", **kwargs) + if hasattr(self.credential, "get_token"): + return policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + raise TypeError(f"Unsupported credential: {self.credential}") + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = self._infer_policy(**kwargs) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py deleted file mode 100644 index 169e4ae447ad..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ /dev/null @@ -1,1611 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, invalid-overridden-method, docstring-keyword-should-match-keyword-only - -import functools -import warnings -from datetime import datetime -from typing import ( - Any, AnyStr, AsyncIterable, AsyncIterator, cast, Dict, List, IO, Iterable, Optional, overload, Union, - TYPE_CHECKING -) -from urllib.parse import unquote, urlparse -from typing_extensions import Self - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceNotFoundError -from azure.core.pipeline import AsyncPipeline -from azure.core.pipeline.transport import AsyncHttpResponse # pylint: disable=C4756 -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async - -from ._blob_client_async import BlobClient -from ._download_async import StorageStreamDownloader -from ._lease_async import BlobLeaseClient -from ._list_blobs_helper import BlobNamesPaged, BlobPropertiesPaged, BlobPrefix -from ._models import FilteredBlobPaged -from .._container_client_helpers import ( - _format_url, - _generate_delete_blobs_options, - _generate_set_tiers_options, - _parse_url -) -from .._deserialize import deserialize_container_properties -from .._encryption import StorageEncryptionMixin -from .._generated.aio import AzureBlobStorage -from .._generated.models import SignedIdentifier -from .._list_blobs_helper import IgnoreListBlobsDeserializer -from .._models import ContainerProperties, BlobType, BlobProperties, FilteredBlob -from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions -from .._shared.base_client import StorageAccountHostsMixin -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str -from .._shared.policies_async import ExponentialRetry -from .._shared.request_handlers import add_metadata_headers, serialize_iso -from .._shared.response_handlers import ( - process_storage_error, - return_headers_and_deserialized, - return_response_headers -) - -if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential - from azure.core.credentials_async import AsyncTokenCredential - from ._blob_service_client_async import BlobServiceClient - from .._models import ( - AccessPolicy, - StandardBlobTier, - PremiumPageBlobTier, - PublicAccess - ) - - -class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods - """A client to interact with a specific container, although that container - may not yet exist. - - For operations relating to a specific blob within this container, a blob client can be - retrieved using the :func:`~get_blob_client` function. - - :param str account_url: - The URI to the storage account. In order to create a client given the full URI to the container, - use the :func:`from_container_url` classmethod. - :param container_name: - The name of the container for the blob. - :type container_name: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token. 
The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :keyword str api_version: - The Storage API version to use for requests. Default value is the most recent service version that is - compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. - - .. versionadded:: 12.2.0 - - :keyword str secondary_hostname: - The hostname of the secondary endpoint. - :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. - Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be - uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, - the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. - :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient - algorithm when uploading a block blob. Defaults to 4*1024*1024+1. - :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. - :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. - :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, - the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. - :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, - or 4MB. - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://.blob.core.windows.net. - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers_async.py - :start-after: [START create_container_client_from_service] - :end-before: [END create_container_client_from_service] - :language: python - :dedent: 8 - :caption: Get a ContainerClient from an existing BlobServiceClient. - - .. literalinclude:: ../samples/blob_samples_containers_async.py - :start-after: [START create_container_client_sasurl] - :end-before: [END create_container_client_sasurl] - :language: python - :dedent: 12 - :caption: Creating the container client directly. - """ - def __init__( - self, account_url: str, - container_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) - - self.container_name = container_name - # This parameter is used for the hierarchy traversal. Give precedence to credential. 
- self._raw_credential = credential if credential else sas_token - self._query_str, credential = self._format_query_string(sas_token, credential) - super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._api_version = get_api_version(kwargs) - self._client = self._build_generated_client() - self._configure_encryption(kwargs) - - def _build_generated_client(self) -> AzureBlobStorage: - client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access - return client - - def _format_url(self, hostname): - return _format_url( - container_name=self.container_name, - hostname=hostname, - scheme=self.scheme, - query_str=self._query_str - ) - - @classmethod - def from_container_url( - cls, container_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any - ) -> Self: - """Create ContainerClient from a container url. - - :param str container_url: - The full endpoint URL to the Container, including SAS token if used. This could be - either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. - :type container_url: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, - an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, - an account shared access key, or an instance of a TokenCredentials class from azure.identity. - If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. - If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" - should be the storage account key. - :type credential: - ~azure.core.credentials.AzureNamedKeyCredential or - ~azure.core.credentials.AzureSasCredential or - ~azure.core.credentials_async.AsyncTokenCredential or - str or dict[str, str] or None - :keyword str audience: The audience to use when requesting tokens for Azure Active Directory - authentication. Only has an effect when credential is of type TokenCredential. The value could be - https://storage.azure.com/ (default) or https://.blob.core.windows.net. - :returns: A container client. - :rtype: ~azure.storage.blob.ContainerClient - """ - try: - if not container_url.lower().startswith('http'): - container_url = "https://" + container_url - except AttributeError as exc: - raise ValueError("Container URL must be a string.") from exc - parsed_url = urlparse(container_url) - if not parsed_url.netloc: - raise ValueError(f"Invalid URL: {container_url}") - - container_path = parsed_url.path.strip('/').split('/') - account_path = "" - if len(container_path) > 1: - account_path = "/" + "/".join(container_path[:-1]) - account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" - container_name = unquote(container_path[-1]) - if not container_name: - raise ValueError("Invalid URL. 
Please provide a URL with a valid container name")
- return cls(account_url, container_name=container_name, credential=credential, **kwargs)
-
- @classmethod
- def from_connection_string(
- cls, conn_str: str,
- container_name: str,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
- ) -> Self:
- """Create ContainerClient from a Connection String.
-
- :param str conn_str:
- A connection string to an Azure Storage account.
- :param container_name:
- The container name for the blob.
- :type container_name: str
- :param credential:
- The credentials with which to authenticate. This is optional if the
- account URL already has a SAS token, or the connection string already has shared
- access key values. The value can be a SAS token string,
- an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
- an account shared access key, or an instance of a TokenCredentials class from azure.identity.
- Credentials provided here will take precedence over those in the connection string.
- If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
- should be the storage account key.
- :type credential:
- ~azure.core.credentials.AzureNamedKeyCredential or
- ~azure.core.credentials.AzureSasCredential or
- ~azure.core.credentials_async.AsyncTokenCredential or
- str or dict[str, str] or None
- :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
- authentication. Only has an effect when credential is of type TokenCredential. The value could be
- https://storage.azure.com/ (default) or https://.blob.core.windows.net.
- :returns: A container client.
- :rtype: ~azure.storage.blob.ContainerClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_authentication.py
- :start-after: [START auth_from_connection_string_container]
- :end-before: [END auth_from_connection_string_container]
- :language: python
- :dedent: 8
- :caption: Creating the ContainerClient from a connection string.
- """
- account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
- if 'secondary_hostname' not in kwargs:
- kwargs['secondary_hostname'] = secondary
- return cls(
- account_url, container_name=container_name, credential=credential, **kwargs)
-
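To make the constructor concrete, a small sketch with a placeholder connection string (the real string carries the account name and key), calling create_container as defined just below:

import asyncio
from azure.storage.blob.aio import ContainerClient

async def main():
    container_client = ContainerClient.from_connection_string(
        "<connection-string>",  # placeholder
        container_name="my-container",
    )
    async with container_client:
        await container_client.create_container(metadata={"Category": "test"})

asyncio.run(main())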
- @distributed_trace_async
- async def create_container(
- self, metadata: Optional[Dict[str, str]] = None,
- public_access: Optional[Union["PublicAccess", str]] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """
- Creates a new container under the specified account. If the container
- with the same name already exists, the operation fails.
-
- :param metadata:
- A dict with name-value pairs to associate with the
- container as metadata. Example: {'Category':'test'}
- :type metadata: dict[str, str]
- :param ~azure.storage.blob.PublicAccess public_access:
- Possible values include: 'container', 'blob'.
- :keyword container_encryption_scope:
- Specifies the default encryption scope to set on the container and use for
- all future writes.
-
- .. versionadded:: 12.2.0
-
- :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A dictionary of response headers.
- :rtype: Dict[str, Union[str, datetime]]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START create_container]
- :end-before: [END create_container]
- :language: python
- :dedent: 16
- :caption: Creating a container to store blobs.
- """
- headers = kwargs.pop('headers', {})
- headers.update(add_metadata_headers(metadata)) # type: ignore
- timeout = kwargs.pop('timeout', None)
- container_cpk_scope_info = get_container_cpk_scope_info(kwargs)
- try:
- return await self._client.container.create( # type: ignore
- timeout=timeout,
- access=public_access,
- container_cpk_scope_info=container_cpk_scope_info,
- cls=return_response_headers,
- headers=headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient":
- """Renames a container.
-
- Operation is successful only if the source container exists.
-
- :param str new_name:
- The new container name the user wants to rename to.
- :keyword lease:
- Specify this to perform only if the lease ID given
- matches the active lease ID of the source container.
- :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: The renamed container.
- :rtype: ~azure.storage.blob.ContainerClient
- """
- lease = kwargs.pop('lease', None)
- try:
- kwargs['source_lease_id'] = lease.id
- except AttributeError:
- kwargs['source_lease_id'] = lease
- try:
- renamed_container = ContainerClient(
- f"{self.scheme}://{self.primary_hostname}", container_name=new_name,
- credential=self.credential, api_version=self.api_version, _configuration=self._config,
- _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
- require_encryption=self.require_encryption, encryption_version=self.encryption_version,
- key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function)
- await renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access
- return renamed_container
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def delete_container(self, **kwargs: Any) -> None:
- """
- Marks the specified container for deletion. The container and any blobs
- contained within it are later deleted during garbage collection.
-
- :keyword lease:
- If specified, delete_container only succeeds if the
- container's lease is active and matches this ID.
- Required if the container has an active lease.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :rtype: None
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START delete_container]
- :end-before: [END delete_container]
- :language: python
- :dedent: 16
- :caption: Delete a container.
- """
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- mod_conditions = get_modify_conditions(kwargs)
- timeout = kwargs.pop('timeout', None)
- try:
- await self._client.container.delete(
- timeout=timeout,
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
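One plausible way the delete path composes with leasing (acquire_lease is defined just below); a sketch assuming a container_client built as in the constructors above:

async def delete_under_lease(container_client):
    # Take a short lease so no other writer can modify or delete the container,
    # then pass it so the service accepts the delete.
    lease = await container_client.acquire_lease(lease_duration=15)
    await container_client.delete_container(lease=lease)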
- @distributed_trace_async
- async def acquire_lease(
- self, lease_duration: int = -1,
- lease_id: Optional[str] = None,
- **kwargs: Any
- ) -> BlobLeaseClient:
- """
- Requests a new lease. If the container does not have an active lease,
- the Blob service creates a lease on the container and returns a new
- lease ID.
-
- :param int lease_duration:
- Specifies the duration of the lease, in seconds, or negative one
- (-1) for a lease that never expires. A non-infinite lease can be
- between 15 and 60 seconds. A lease duration cannot be changed
- using renew or change. Default is -1 (infinite lease).
- :param str lease_id:
- Proposed lease ID, in a GUID string format. The Blob service returns
- 400 (Invalid request) if the proposed lease ID is not in the correct format.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: A BlobLeaseClient object that can be run in a context manager.
- :rtype: ~azure.storage.blob.aio.BlobLeaseClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START acquire_lease_on_container]
- :end-before: [END acquire_lease_on_container]
- :language: python
- :dedent: 12
- :caption: Acquiring a lease on the container.
- """
- lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore
- kwargs.setdefault('merge_span', True)
- timeout = kwargs.pop('timeout', None)
- await lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs)
- return lease
-
- @distributed_trace_async
- async def get_account_information(self, **kwargs: Any) -> Dict[str, str]:
- """Gets information related to the storage account.
-
- The information can also be retrieved if the user has a SAS to a container or blob.
- The keys in the returned dictionary include 'sku_name' and 'account_kind'.
-
- :returns: A dict of account information (SKU and account type).
- :rtype: dict(str, str)
- """
- try:
- return await self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace_async
- async def get_container_properties(self, **kwargs: Any) -> ContainerProperties:
- """Returns all user-defined metadata and system properties for the specified
- container. The data returned does not include the container's list of blobs.
-
- :keyword lease:
- If specified, get_container_properties only succeeds if the
- container's lease is active and matches this ID.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: Properties for the specified container within a container object.
- :rtype: ~azure.storage.blob.ContainerProperties
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START get_container_properties]
- :end-before: [END get_container_properties]
- :language: python
- :dedent: 16
- :caption: Getting properties on the container.
- """
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- timeout = kwargs.pop('timeout', None)
- try:
- response = await self._client.container.get_properties(
- timeout=timeout,
- lease_access_conditions=access_conditions,
- cls=deserialize_container_properties,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- response.name = self.container_name
- return response # type: ignore
-
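A sketch of the two read-only calls just defined, assuming the same container_client as above:

async def inspect(container_client):
    info = await container_client.get_account_information()
    props = await container_client.get_container_properties()
    print(info["sku_name"], info["account_kind"], props.name, props.last_modified)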
- @distributed_trace_async
- async def exists(self, **kwargs: Any) -> bool:
- """
- Returns True if a container exists and returns False otherwise.
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: boolean
- :rtype: bool
- """
- try:
- await self._client.container.get_properties(**kwargs)
- return True
- except HttpResponseError as error:
- try:
- process_storage_error(error)
- except ResourceNotFoundError:
- return False
-
- @distributed_trace_async
- async def set_container_metadata(
- self, metadata: Optional[Dict[str, str]] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """Sets one or more user-defined name-value pairs for the specified
- container. Each call to this operation replaces all existing metadata
- attached to the container. To remove all metadata from the container,
- call this operation with no metadata dict.
-
- :param metadata:
- A dict containing name-value pairs to associate with the container as
- metadata. Example: {'category':'test'}
- :type metadata: dict[str, str]
- :keyword lease:
- If specified, set_container_metadata only succeeds if the
- container's lease is active and matches this ID.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Container-updated property dict (Etag and last modified).
- :rtype: Dict[str, Union[str, datetime]]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START set_container_metadata]
- :end-before: [END set_container_metadata]
- :language: python
- :dedent: 16
- :caption: Setting metadata on the container.
- """
- headers = kwargs.pop('headers', {})
- headers.update(add_metadata_headers(metadata))
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- mod_conditions = get_modify_conditions(kwargs)
- timeout = kwargs.pop('timeout', None)
- try:
- return await self._client.container.set_metadata( # type: ignore
- timeout=timeout,
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- headers=headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
-
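And the existence check paired with a metadata replacement, again assuming a container_client; per the docstring above, calling the setter with no dict clears all metadata:

async def tag_container(container_client):
    if await container_client.exists():
        # Replaces any existing metadata wholesale.
        await container_client.set_container_metadata({"category": "test"})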
- @distributed_trace
- def _get_blob_service_client(self) -> "BlobServiceClient": # pylint: disable=client-method-missing-kwargs
- """Get a client to interact with the container's parent service account.
-
- Defaults to current container's credentials.
-
- :returns: A BlobServiceClient.
- :rtype: ~azure.storage.blob.BlobServiceClient
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_service_async.py
- :start-after: [START get_blob_service_client_from_container_client]
- :end-before: [END get_blob_service_client_from_container_client]
- :language: python
- :dedent: 8
- :caption: Get blob service client from container object.
- """
- from ._blob_service_client_async import BlobServiceClient
- if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access
- _pipeline = AsyncPipeline(
- transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
- policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access
- )
- else:
- _pipeline = self._pipeline # pylint: disable = protected-access
- return BlobServiceClient(
- f"{self.scheme}://{self.primary_hostname}",
- credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
- _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption,
- encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key,
- key_resolver_function=self.key_resolver_function, _pipeline=_pipeline)
-
-
- @distributed_trace_async
- async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]:
- """Gets the permissions for the specified container.
- The permissions indicate whether container data may be accessed publicly.
-
- :keyword lease:
- If specified, get_container_access_policy only succeeds if the
- container's lease is active and matches this ID.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Access policy information in a dict.
- :rtype: dict[str, Any]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START get_container_access_policy]
- :end-before: [END get_container_access_policy]
- :language: python
- :dedent: 16
- :caption: Getting the access policy on the container.
- """
- lease = kwargs.pop('lease', None)
- access_conditions = get_access_conditions(lease)
- timeout = kwargs.pop('timeout', None)
- try:
- response, identifiers = await self._client.container.get_access_policy(
- timeout=timeout,
- lease_access_conditions=access_conditions,
- cls=return_headers_and_deserialized,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- return {
- 'public_access': response.get('blob_public_access'),
- 'signed_identifiers': identifiers or []
- }
-
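A sketch pairing this getter with the setter defined next, assuming a container_client; AccessPolicy and ContainerSasPermissions are the models from the sync namespace:

from datetime import datetime, timedelta, timezone
from azure.storage.blob import AccessPolicy, ContainerSasPermissions

async def rotate_policy(container_client):
    current = await container_client.get_container_access_policy()
    print(current["public_access"], len(current["signed_identifiers"]))
    policy = AccessPolicy(
        permission=ContainerSasPermissions(read=True),
        start=datetime.now(timezone.utc),
        expiry=datetime.now(timezone.utc) + timedelta(hours=1),
    )
    await container_client.set_container_access_policy(signed_identifiers={"read-only": policy})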
- @distributed_trace_async
- async def set_container_access_policy(
- self, signed_identifiers: Dict[str, "AccessPolicy"],
- public_access: Optional[Union[str, "PublicAccess"]] = None,
- **kwargs: Any
- ) -> Dict[str, Union[str, datetime]]:
- """Sets the permissions for the specified container or stored access
- policies that may be used with Shared Access Signatures. The permissions
- indicate whether blobs in a container may be accessed publicly.
-
- :param signed_identifiers:
- A dictionary of access policies to associate with the container. The
- dictionary may contain up to 5 elements. An empty dictionary
- will clear the access policies set on the service.
- :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy]
- :param ~azure.storage.blob.PublicAccess public_access:
- Possible values include: 'container', 'blob'.
- :keyword lease:
- Required if the container has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A datetime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified date/time.
- :keyword ~datetime.datetime if_unmodified_since:
- A datetime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: Container-updated property dict (Etag and last modified).
- :rtype: dict[str, str or ~datetime.datetime]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START set_container_access_policy]
- :end-before: [END set_container_access_policy]
- :language: python
- :dedent: 16
- :caption: Setting access policy on the container.
- """
- timeout = kwargs.pop('timeout', None)
- lease = kwargs.pop('lease', None)
- if len(signed_identifiers) > 5:
- raise ValueError(
- 'Too many access policies provided. The server does not support setting '
- 'more than 5 access policies on a single resource.')
- identifiers = []
- for key, value in signed_identifiers.items():
- if value:
- value.start = serialize_iso(value.start)
- value.expiry = serialize_iso(value.expiry)
- identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore
- signed_identifiers = identifiers # type: ignore
-
- mod_conditions = get_modify_conditions(kwargs)
- access_conditions = get_access_conditions(lease)
- try:
- return cast(Dict[str, Union[str, datetime]], await self._client.container.set_access_policy(
- container_acl=signed_identifiers or None,
- timeout=timeout,
- access=public_access,
- lease_access_conditions=access_conditions,
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs))
- except HttpResponseError as error:
- process_storage_error(error)
-
- @distributed_trace
- def list_blobs(
- self, name_starts_with: Optional[str] = None,
- include: Optional[Union[str, List[str]]] = None,
- **kwargs: Any
- ) -> AsyncItemPaged[BlobProperties]:
- """Returns a generator to list the blobs under the specified container.
- The generator will lazily follow the continuation tokens returned by
- the service.
-
- :param str name_starts_with:
- Filters the results to return only blobs whose names
- begin with the specified prefix.
- :param include:
- Specifies one or more additional datasets to include in the response.
- Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions',
- 'tags', 'versions', 'immutabilitypolicy', 'legalhold'.
- :type include: list[str] or str
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) response of BlobProperties.
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties]
-
- .. admonition:: Example:
-
- .. literalinclude:: ../samples/blob_samples_containers_async.py
- :start-after: [START list_blobs_in_container]
- :end-before: [END list_blobs_in_container]
- :language: python
- :dedent: 12
- :caption: List the blobs in the container.
- """
- if kwargs.pop('prefix', None):
- raise ValueError("Passing 'prefix' has no effect on filtering, " +
- "please use the 'name_starts_with' parameter instead.")
-
- if include and not isinstance(include, list):
- include = [include]
-
- results_per_page = kwargs.pop('results_per_page', None)
- timeout = kwargs.pop('timeout', None)
- command = functools.partial(
- self._client.container.list_blob_flat_segment,
- include=include,
- timeout=timeout,
- **kwargs)
- return AsyncItemPaged(
- command,
- prefix=name_starts_with,
- results_per_page=results_per_page,
- container=self.container_name,
- page_iterator_class=BlobPropertiesPaged
- )
-
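Flat listing in use, assuming a container_client; include pulls the optional datasets per the docstring above:

async def show_blobs(container_client):
    async for blob in container_client.list_blobs(name_starts_with="logs/", include=["metadata"]):
        print(blob.name, blob.size, blob.metadata)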
- @distributed_trace
- def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]:
- """Returns a generator to list the names of blobs under the specified container.
- The generator will lazily follow the continuation tokens returned by
- the service.
-
- Note that no additional properties or metadata will be returned when using this API.
- Additionally, this API does not have an option to include additional blobs such as snapshots,
- versions, soft-deleted blobs, etc. To get any of this data, use :func:`list_blobs()`.
-
- :keyword str name_starts_with:
- Filters the results to return only blobs whose names
- begin with the specified prefix.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) response of blob names as strings.
- :rtype: ~azure.core.async_paging.AsyncItemPaged[str]
- """
- if kwargs.pop('prefix', None):
- raise ValueError("Passing 'prefix' has no effect on filtering, " +
- "please use the 'name_starts_with' parameter instead.")
-
- name_starts_with = kwargs.pop('name_starts_with', None)
- results_per_page = kwargs.pop('results_per_page', None)
- timeout = kwargs.pop('timeout', None)
-
- # For listing only names we need to create a one-off generated client and
- # override its deserializer to prevent deserialization of the full response.
- client = self._build_generated_client()
- client.container._deserialize = IgnoreListBlobsDeserializer() # pylint: disable=protected-access
-
- command = functools.partial(
- client.container.list_blob_flat_segment,
- timeout=timeout,
- **kwargs)
- return AsyncItemPaged(
- command,
- prefix=name_starts_with,
- results_per_page=results_per_page,
- container=self.container_name,
- page_iterator_class=BlobNamesPaged)
-
- @distributed_trace
- def walk_blobs(
- self, name_starts_with: Optional[str] = None,
- include: Optional[Union[List[str], str]] = None,
- delimiter: str = "/",
- **kwargs: Any
- ) -> AsyncItemPaged[BlobProperties]:
- """Returns a generator to list the blobs under the specified container.
- The generator will lazily follow the continuation tokens returned by
- the service. This operation will list blobs in accordance with a hierarchy,
- as delimited by the specified delimiter character.
-
- :param str name_starts_with:
- Filters the results to return only blobs whose names
- begin with the specified prefix.
- :param include:
- Specifies one or more additional datasets to include in the response.
- Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions',
- 'tags', 'versions', 'immutabilitypolicy', 'legalhold'.
- :type include: list[str] or str
- :param str delimiter:
- When the request includes this parameter, the operation returns a BlobPrefix
- element in the response body that acts as a placeholder for all blobs whose
- names begin with the same substring up to the appearance of the delimiter
- character. The delimiter may be a single character or a string.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) response of BlobProperties.
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties]
- """
- if kwargs.pop('prefix', None):
- raise ValueError("Passing 'prefix' has no effect on filtering, " +
- "please use the 'name_starts_with' parameter instead.")
-
- if include and not isinstance(include, list):
- include = [include]
-
- results_per_page = kwargs.pop('results_per_page', None)
- timeout = kwargs.pop('timeout', None)
- command = functools.partial(
- self._client.container.list_blob_hierarchy_segment,
- delimiter=delimiter,
- include=include,
- timeout=timeout,
- **kwargs)
- return BlobPrefix(
- command,
- prefix=name_starts_with,
- results_per_page=results_per_page,
- container=self.container_name,
- delimiter=delimiter)
-
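Hierarchical traversal by delimiter, in contrast to the flat listing above; a sketch assuming the same container_client:

async def walk(container_client):
    async for item in container_client.walk_blobs(delimiter="/"):
        # Items are BlobPrefix nodes (virtual "directories") or BlobProperties leaves.
        print(item.name)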
- @distributed_trace
- def find_blobs_by_tags(
- self, filter_expression: str,
- **kwargs: Any
- ) -> AsyncItemPaged[FilteredBlob]:
- """Returns a generator to list the blobs under the specified container whose tags
- match the given search expression.
- The generator will lazily follow the continuation tokens returned by
- the service.
-
- :param str filter_expression:
- The expression to find blobs whose tags match the specified condition.
- eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
- :keyword int results_per_page:
- The max result per page when paginating.
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :returns: An iterable (auto-paging) response of FilteredBlob.
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob]
- """
- results_per_page = kwargs.pop('results_per_page', None)
- timeout = kwargs.pop('timeout', None)
- command = functools.partial(
- self._client.container.filter_blobs,
- timeout=timeout,
- where=filter_expression,
- **kwargs)
- return AsyncItemPaged(
- command, results_per_page=results_per_page,
- container=self.container_name,
- page_iterator_class=FilteredBlobPaged)
-
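Tag filtering in use; the expression is the service's SQL-like where clause, quoted as in the docstring above (tag name and value here are placeholders):

async def find_tagged(container_client):
    async for tagged in container_client.find_blobs_by_tags("\"project\"='alpha'"):
        print(tagged.name, tagged.tags)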
- @distributed_trace_async
- async def upload_blob(
- self, name: str,
- data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]],
- blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB,
- length: Optional[int] = None,
- metadata: Optional[Dict[str, str]] = None,
- **kwargs
- ) -> BlobClient:
- """Creates a new blob from a data source with automatic chunking.
-
- :param str name: The blob with which to interact.
- :param data: The blob data to upload.
- :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]]
- :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
- either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
- :param int length:
- Number of bytes to read from the stream. This is optional, but
- should be supplied for optimal performance.
- :param metadata:
- Name-value pairs associated with the blob as metadata.
- :type metadata: dict(str, str)
- :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
- If True, upload_blob will overwrite the existing data. If set to False, the
- operation will fail with ResourceExistsError. The exception to the above is with Append
- blob types: if set to False and the data already exists, an error will not be raised
- and the data will be appended to the existing blob. If set overwrite=True, then the existing
- append blob will be deleted, and a new one created. Defaults to False.
- :keyword ~azure.storage.blob.ContentSettings content_settings:
- ContentSettings object used to set blob properties. Used to set content type, encoding,
- language, disposition, md5, and cache control.
- :keyword bool validate_content:
- If true, calculates an MD5 hash for each chunk of the blob. The storage
- service checks the hash of the content that has arrived with the hash
- that was sent. This is primarily valuable for detecting bitflips on
- the wire if using http instead of https, as https (the default), will
- already validate. Note that this MD5 hash is not stored with the
- blob. Also note that if enabled, the memory-efficient upload algorithm
- will not be used, because computing the MD5 hash requires buffering
- entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
- :keyword lease:
- Required if the container has an active lease. Value can be a BlobLeaseClient object
- or the lease ID as a string.
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blob with a matching value.
- eg. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__. This method may make multiple calls to the service and
- the timeout will apply to each call individually.
- :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
- A page blob tier value to set the blob to. The tier correlates to the size of the
- blob and number of allowed IOPS. This is only applicable to page blobs on
- premium storage accounts.
- :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
- A standard blob tier value to set the blob to. For this version of the library,
- this is only applicable to block blobs on standard storage accounts.
- :keyword int maxsize_condition:
- Optional conditional header. The max length in bytes permitted for
- the append blob. If the Append Block operation would cause the blob
- to exceed that limit or if the blob size is already greater than the
- value specified in this header, the request will fail with
- MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
- :keyword int max_concurrency:
- Maximum number of parallel connections to use when the blob size exceeds
- 64MB.
- :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
- Encrypts the data on the service-side with the given key.
- Use of customer-provided keys must be done over HTTPS.
- As the encryption key itself is provided in the request,
- a secure connection must be established to transfer the key.
- :keyword str encryption_scope:
- A predefined encryption scope used to encrypt the data on the service. An encryption
- scope can be created using the Management API and referenced here by name. If a default
- encryption scope has been defined at the container, this value will override it if the
- container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
- .. versionadded:: 12.2.0
-
- :keyword str encoding:
- Defaults to UTF-8.
- :keyword progress_hook:
- An async callback to track the progress of a long running upload.
The signature is - function(current: int, total: Optional[int]) where current is the number of bytes transferred - so far, and total is the size of the blob or None if the size is unknown. - :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] - :returns: A BlobClient to interact with the newly uploaded blob. - :rtype: ~azure.storage.blob.aio.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers_async.py - :start-after: [START upload_blob_to_container] - :end-before: [END upload_blob_to_container] - :language: python - :dedent: 12 - :caption: Upload blob to the container. - """ - if isinstance(name, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param name is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob = self.get_blob_client(name) - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - encoding = kwargs.pop('encoding', 'UTF-8') - await blob.upload_blob( - data, - blob_type=blob_type, - length=length, - metadata=metadata, - timeout=timeout, - encoding=encoding, - **kwargs - ) - return blob - - @distributed_trace_async - async def delete_blob( - self, blob: str, - delete_snapshots: Optional[str] = None, - **kwargs: Any - ) -> None: - """Marks the specified blob or snapshot for deletion. - - The blob is later deleted during garbage collection. - Note that in order to delete a blob, you must delete all of its - snapshots. You can delete both at the same time with the delete_blob - operation. - - If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot - and retains the blob or snapshot for specified number of days. - After specified number of days, blob's data is removed from the service during garbage collection. - Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` - - :param str blob: The blob with which to interact. - :param str delete_snapshots: - Required if the blob has associated snapshots. Values include: - - "only": Deletes only the blobs snapshots. - - "include": Deletes the blob along with all snapshots. - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to delete. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword lease: - Required if the blob has an active lease. Value can be a Lease object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. 
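As an editor's aside for reviewers: the async `upload_blob` above resolves a per-blob client and forwards the call. A minimal, self-contained usage sketch, assuming placeholder values for the connection string, container, and blob name:

```python
import asyncio
from azure.storage.blob.aio import ContainerClient

async def main() -> None:
    # Placeholder connection string and container name.
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="mycontainer")
    async with container:
        # overwrite=True replaces any existing block blob with the same name.
        blob = await container.upload_blob(
            name="hello.txt", data=b"Hello, world!", overwrite=True)
        print(blob.blob_name)

asyncio.run(main())
```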
- Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :rtype: None - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob = self.get_blob_client(blob) # type: ignore - kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - await blob.delete_blob( # type: ignore - delete_snapshots=delete_snapshots, - timeout=timeout, - **kwargs) - - @overload - async def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: str, - **kwargs: Any - ) -> StorageStreamDownloader[str]: - ... - - @overload - async def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: None = None, - **kwargs: Any - ) -> StorageStreamDownloader[bytes]: - ... - - @distributed_trace_async - async def download_blob( - self, blob: str, - offset: Optional[int] = None, - length: Optional[int] = None, - *, - encoding: Union[str, None] = None, - **kwargs: Any - ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: - """Downloads a blob to the StorageStreamDownloader. The readall() method must - be used to read all the content or readinto() must be used to download the blob into - a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. - - :param str blob: The blob with which to interact. - :param int offset: - Start of byte range to use for downloading a section of the blob. - Must be set if length is provided. - :param int length: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - :keyword str version_id: - The version id parameter is an opaque DateTime - value that, when present, specifies the version of the blob to download. - - .. versionadded:: 12.4.0 - - This keyword argument was introduced in API version '2019-12-12'. - - :keyword bool validate_content: - If true, calculates an MD5 hash for each chunk of the blob. The storage - service checks the hash of the content that has arrived with the hash - that was sent. This is primarily valuable for detecting bitflips on - the wire if using http instead of https, as https (the default), will - already validate. Note that this MD5 hash is not stored with the - blob. 
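For context, a short sketch of the soft-delete round trip that the `delete_blob` docstring above describes. This assumes `container` is an open async `ContainerClient`, the code runs inside a coroutine, and delete retention is enabled on the account:

```python
# Delete the blob together with all of its snapshots.
await container.delete_blob("hello.txt", delete_snapshots="include")

# While the delete retention window is open, the blob can be restored.
await container.get_blob_client("hello.txt").undelete_blob()
```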
Also note that if enabled, the memory-efficient download algorithm - will not be used because computing the MD5 hash requires buffering - entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. - :keyword lease: - Required if the blob has an active lease. If specified, download_blob only - succeeds if the blob's lease is active and matches this ID. Value can be a - BlobLeaseClient object or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str etag: - An ETag value, or the wildcard character (*). Used to check if the resource has changed, - and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: - The match condition to use upon the etag. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: - Encrypts the data on the service-side with the given key. - Use of customer-provided keys must be done over HTTPS. - As the encryption key itself is provided in the request, - a secure connection must be established to transfer the key. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword str encoding: - Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. - :keyword progress_hook: - An async callback to track the progress of a long running download. The signature is - function(current: int, total: int) where current is the number of bytes transferred - so far, and total is the total size of the download. - :paramtype progress_hook: Callable[[int, int], Awaitable[None]] - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timeouts - see `here `__. This method may make multiple calls to the service and - the timeout will apply to each call individually. - :returns: A streaming object (StorageStreamDownloader). - :rtype: ~azure.storage.blob.aio.StorageStreamDownloader - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. 
" + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_client = self.get_blob_client(blob) # type: ignore - kwargs.setdefault('merge_span', True) - return await blob_client.download_blob( - offset=offset, - length=length, - encoding=encoding, - **kwargs) - - @distributed_trace_async - async def delete_blobs( - self, *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: - """Marks the specified blobs or snapshots for deletion. - - The blobs are later deleted during garbage collection. - Note that in order to delete blobs, you must delete all of their - snapshots. You can delete both at the same time with the delete_blobs operation. - - If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or snapshots - and retains the blobs or snapshots for specified number of days. - After specified number of days, blobs' data is removed from the service during garbage collection. - Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` - - The maximum number of blobs that can be deleted in a single request is 256. - - :param blobs: - The blobs to delete. This can be a single blob, or multiple values can - be supplied, where each value is either the name of the blob (str) or BlobProperties. - - .. note:: - When the blob type is dict, here's a list of keys, value rules. - - blob name: - key: 'name', value type: str - snapshot you want to delete: - key: 'snapshot', value type: str - version id: - key: 'version_id', value type: str - whether to delete snapshots when deleting blob: - key: 'delete_snapshots', value: 'include' or 'only' - if the blob modified or not: - key: 'if_modified_since', 'if_unmodified_since', value type: datetime - etag: - key: 'etag', value type: str - match the etag or not: - key: 'match_condition', value type: MatchConditions - tags match condition: - key: 'if_tags_match_condition', value type: str - lease: - key: 'lease_id', value type: Union[str, LeaseClient] - timeout for subrequest: - key: 'timeout', value type: int - - :type blobs: Union[str, Dict[str, Any], BlobProperties] - :keyword str delete_snapshots: - Required if a blob has associated snapshots. Values include: - - "only": Deletes only the blobs snapshots. - - "include": Deletes the blob along with all snapshots. - :keyword ~datetime.datetime if_modified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only - if the resource has been modified since the specified time. - :keyword ~datetime.datetime if_unmodified_since: - A DateTime value. Azure expects the date value passed in to be UTC. - If timezone is included, any non-UTC datetimes will be converted to UTC. - If a date is passed in without timezone info, it is assumed to be UTC. - Specify this header to perform the operation only if - the resource has not been modified since the specified date/time. - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. ``\"\\\"tagname\\\"='my tag'\"`` - - .. 
versionadded:: 12.4.0 - - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. For optimal performance, - this should be set to False - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :return: An async iterator of responses, one for each blob in order - :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_common_async.py - :start-after: [START delete_multiple_blobs] - :end-before: [END delete_multiple_blobs] - :language: python - :dedent: 12 - :caption: Deleting multiple blobs. - """ - if len(blobs) == 0: - return AsyncList([]) - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - - reqs, options = _generate_delete_blobs_options( - self._query_str, - self.container_name, - self._client, - *blobs, - **kwargs - ) - - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - - @distributed_trace_async - async def set_standard_blob_tier_blobs( - self, standard_blob_tier: Union[str, 'StandardBlobTier'], - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: - """This operation sets the tier on block blobs. - - A block blob's tier determines Hot/Cool/Archive storage type. - This operation does not update the blob's ETag. - - The maximum number of blobs that can be updated in a single request is 256. - - :param standard_blob_tier: - Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', - 'Archive'. The hot tier is optimized for storing data that is accessed - frequently. The cool storage tier is optimized for storing data that - is infrequently accessed and stored for at least a month. The archive - tier is optimized for storing data that is rarely accessed and stored - for at least six months with flexible latency requirements. - - .. note:: - If you want to set different tier on different blobs please set this positional parameter to None. - Then the blob tier on every BlobProperties will be taken. - - :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier - :param blobs: - The blobs with which to interact. This can be a single blob, or multiple values can - be supplied, where each value is either the name of the blob (str) or BlobProperties. - - .. note:: - When the blob type is dict, here's a list of keys, value rules. - - blob name: - key: 'name', value type: str - standard blob tier: - key: 'blob_tier', value type: StandardBlobTier - rehydrate priority: - key: 'rehydrate_priority', value type: RehydratePriority - lease: - key: 'lease_id', value type: Union[str, LeaseClient] - tags match condition: - key: 'if_tags_match_condition', value type: str - timeout for subrequest: - key: 'timeout', value type: int - - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties - :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :keyword str if_tags_match_condition: - Specify a SQL where clause on blob tags to operate only on blob with a matching value. - eg. 
``\"\\\"tagname\\\"='my tag'\"`` - - .. versionadded:: 12.4.0 - - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. For optimal performance, - this should be set to False. - :return: An async iterator of responses, one for each blob in order - :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] - """ - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - reqs, options = _generate_set_tiers_options( - self._query_str, - self.container_name, - standard_blob_tier, - self._client, - *blobs, - **kwargs) - - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - - @distributed_trace_async - async def set_premium_page_blob_tier_blobs( - self, premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], - *blobs: Union[str, Dict[str, Any], BlobProperties], - **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: - """Sets the page blob tiers on the blobs. This API is only supported for page blobs on premium accounts. - - The maximum number of blobs that can be updated in a single request is 256. - - :param premium_page_blob_tier: - A page blob tier value to set on all blobs to. The tier correlates to the size of the - blob and number of allowed IOPS. This is only applicable to page blobs on - premium storage accounts. - - .. note:: - If you want to set different tier on different blobs please set this positional parameter to None. - Then the blob tier on every BlobProperties will be taken. - - :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier - :param blobs: The blobs with which to interact. This can be a single blob, or multiple values can - be supplied, where each value is either the name of the blob (str) or BlobProperties. - - .. note:: - When the blob type is dict, here's a list of keys, value rules. - - blob name: - key: 'name', value type: str - premium blob tier: - key: 'blob_tier', value type: PremiumPageBlobTier - lease: - key: 'lease_id', value type: Union[str, LeaseClient] - timeout for subrequest: - key: 'timeout', value type: int - - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. For optimal performance, - this should be set to False. 
- :return: An async iterator of responses, one for each blob in order - :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] - """ - if self._is_localhost: - kwargs['url_prepend'] = self.account_name - reqs, options = _generate_set_tiers_options( - self._query_str, - self.container_name, - premium_page_blob_tier, - self._client, - *blobs, - **kwargs) - - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - - def get_blob_client( - self, blob: str, - snapshot: Optional[str] = None, - *, - version_id: Optional[str] = None - ) -> BlobClient: - """Get a client to interact with the specified blob. - - The blob need not already exist. - - :param str blob: - The blob with which to interact. - :param str snapshot: - The optional blob snapshot on which to operate. This can be the snapshot ID string - or the response returned from :func:`~BlobClient.create_snapshot()`. - :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, - specifies the version of the blob to operate on. - :returns: A BlobClient. - :rtype: ~azure.storage.blob.aio.BlobClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_containers_async.py - :start-after: [START get_blob_client] - :end-before: [END get_blob_client] - :language: python - :dedent: 12 - :caption: Get the blob client. - """ - if isinstance(blob, BlobProperties): - warnings.warn( - "The use of a 'BlobProperties' instance for param blob is deprecated. " + - "Please use 'BlobProperties.name' or any other str input type instead.", - DeprecationWarning - ) - blob_name = blob.get('name') - else: - blob_name = blob - _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable = protected-access - ) - return BlobClient( - self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, - credential=self.credential, api_version=self.api_version, _configuration=self._config, - _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, encryption_version=self.encryption_version, - key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, - version_id=version_id) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py deleted file mode 100644 index dab5afdca85d..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ /dev/null @@ -1,872 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
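Note that `get_blob_client` above wraps the container's existing transport in `AsyncTransportWrapper` rather than building a new pipeline, so creating per-blob clients is cheap and involves no network call. A small sketch (the blob name is a placeholder):

```python
blob = container.get_blob_client("hello.txt")   # local object creation only
props = await blob.get_blob_properties()        # first actual server round trip
print(props.size, props.blob_type)
```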
-# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method -# mypy: disable-error-code=override - -import asyncio -import codecs -import sys -import warnings -from io import BytesIO, StringIO -from itertools import islice -from typing import ( - Any, AsyncIterator, Awaitable, - Generator, Callable, cast, Dict, - Generic, IO, Optional, overload, - Tuple, TypeVar, Union, TYPE_CHECKING -) - -from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError - -from .._shared.request_handlers import validate_and_format_range_headers -from .._shared.response_handlers import parse_length_from_content_range, process_storage_error -from .._deserialize import deserialize_blob_properties, get_page_ranges_result -from .._download import process_range_and_offset, _ChunkDownloader -from .._encryption import ( - adjust_blob_size_for_encryption, - decrypt_blob, - is_encryption_v2, - parse_encryption_data -) - -if TYPE_CHECKING: - from codecs import IncrementalDecoder - from .._encryption import _EncryptionData - from .._generated.aio import AzureBlobStorage - from .._models import BlobProperties - from .._shared.models import StorageConfiguration - - -T = TypeVar('T', bytes, str) - - -async def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: - if data is None: - raise ValueError("Response cannot be None.") - await data.response.read() - content = cast(bytes, data.response.content) - if encryption.get('key') is not None or encryption.get('resolver') is not None: - try: - return decrypt_blob( - encryption.get('required') or False, - encryption.get('key'), - encryption.get('resolver'), - content, - start_offset, - end_offset, - data.response.headers) - except Exception as error: - raise HttpResponseError( - message="Decryption failed.", - response=data.response, - error=error) from error - return content - - -class _AsyncChunkDownloader(_ChunkDownloader): - def __init__(self, **kwargs: Any) -> None: - super(_AsyncChunkDownloader, self).__init__(**kwargs) - self.stream_lock_async = asyncio.Lock() if kwargs.get('parallel') else None - self.progress_lock_async = asyncio.Lock() if kwargs.get('parallel') else None - - async def process_chunk(self, chunk_start: int) -> None: - chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data, _ = await self._download_chunk(chunk_start, chunk_end - 1) - length = chunk_end - chunk_start - if length > 0: - await self._write_to_stream(chunk_data, chunk_start) - await self._update_progress(length) - - async def yield_chunk(self, chunk_start: int) -> Tuple[bytes, int]: - chunk_start, chunk_end = self._calculate_range(chunk_start) - return await self._download_chunk(chunk_start, chunk_end - 1) - - async def _update_progress(self, length: int) -> None: - if self.progress_lock_async: - async with self.progress_lock_async: - self.progress_total += length - else: - self.progress_total += length - - if self.progress_hook: - await cast(Callable[[int, Optional[int]], Awaitable[Any]], self.progress_hook)( - self.progress_total, self.total_size) - - async def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: - if self.stream_lock_async: - async with self.stream_lock_async: - self.stream.seek(self.stream_start + (chunk_start - self.start_index)) - self.stream.write(chunk_data) - else: - self.stream.write(chunk_data) - - async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: - if 
self.encryption_options is None: - raise ValueError("Required argument is missing: encryption_options") - download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data - ) - - # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. - # Do optimize and create empty chunk locally if condition is met. - if self._do_optimize(download_range[0], download_range[1]): - content_length = download_range[1] - download_range[0] + 1 - chunk_data = b"\x00" * content_length - else: - range_header, range_validation = validate_and_format_range_headers( - download_range[0], - download_range[1], - check_content_md5=self.validate_content - ) - - retry_active = True - retry_total = 3 - while retry_active: - try: - _, response = await cast(Awaitable[Any], self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - )) - except HttpResponseError as error: - process_storage_error(error) - - try: - chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) - retry_active = False - except (IncompleteReadError, HttpResponseError, DecodeError) as error: - retry_total -= 1 - if retry_total <= 0: - raise HttpResponseError(error, error=error) from error - await asyncio.sleep(1) - content_length = response.content_length - - # This makes sure that if_match is set so that we can validate - # that subsequent downloads are to an unmodified blob - if self.request_options.get('modified_access_conditions'): - self.request_options['modified_access_conditions'].if_match = response.properties.etag - - return chunk_data, content_length - - -class _AsyncChunkIterator(object): - """Async iterator for chunks in blob download stream.""" - - def __init__(self, size: int, content: bytes, downloader: Optional[_AsyncChunkDownloader], chunk_size: int) -> None: - self.size = size - self._chunk_size = chunk_size - self._current_content = content - self._iter_downloader = downloader - self._iter_chunks: Optional[Generator[int, None, None]] = None - self._complete = size == 0 - - def __len__(self) -> int: - return self.size - - def __iter__(self) -> None: - raise TypeError("Async stream must be iterated asynchronously.") - - def __aiter__(self) -> AsyncIterator[bytes]: - return self - - # Iterate through responses. 
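The `_download_chunk` body above wraps each range GET in a small bounded retry: up to three attempts with a one-second sleep between failures. The same pattern in isolation, as a generic sketch where the `fetch` coroutine and the exception types are stand-ins for the real storage calls and errors:

```python
import asyncio

async def download_with_retry(fetch, retry_total: int = 3) -> bytes:
    """Retry a flaky async fetch a fixed number of times, as _download_chunk does."""
    while True:
        try:
            return await fetch()
        except (ConnectionError, TimeoutError):
            retry_total -= 1
            if retry_total <= 0:
                raise
            await asyncio.sleep(1)
```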
- async def __anext__(self) -> bytes: - if self._complete: - raise StopAsyncIteration("Download complete") - if not self._iter_downloader: - # cut the data obtained from initial GET into chunks - if len(self._current_content) > self._chunk_size: - return self._get_chunk_data() - self._complete = True - return self._current_content - - if not self._iter_chunks: - self._iter_chunks = self._iter_downloader.get_chunk_offsets() - - # initial GET result still has more than _chunk_size bytes of data - if len(self._current_content) >= self._chunk_size: - return self._get_chunk_data() - - try: - chunk = next(self._iter_chunks) - self._current_content += (await self._iter_downloader.yield_chunk(chunk))[0] - except StopIteration as exc: - self._complete = True - # it's likely that there some data left in self._current_content - if self._current_content: - return self._current_content - raise StopAsyncIteration("Download complete") from exc - - return self._get_chunk_data() - - def _get_chunk_data(self) -> bytes: - chunk_data = self._current_content[: self._chunk_size] - self._current_content = self._current_content[self._chunk_size:] - return chunk_data - - -class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes - """ - A streaming object to download from Azure Storage. - """ - - name: str - """The name of the blob being downloaded.""" - container: str - """The name of the container where the blob is.""" - properties: "BlobProperties" - """The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties.""" - size: int - """The size of the total data in the stream. This will be the byte range if specified, - otherwise the total size of the blob.""" - - def __init__( - self, - clients: "AzureBlobStorage" = None, # type: ignore [assignment] - config: "StorageConfiguration" = None, # type: ignore [assignment] - start_range: Optional[int] = None, - end_range: Optional[int] = None, - validate_content: bool = None, # type: ignore [assignment] - encryption_options: Dict[str, Any] = None, # type: ignore [assignment] - max_concurrency: int = 1, - name: str = None, # type: ignore [assignment] - container: str = None, # type: ignore [assignment] - encoding: Optional[str] = None, - download_cls: Optional[Callable] = None, - **kwargs: Any - ) -> None: - self.name = name - self.container = container - self.size = 0 - - self._clients = clients - self._config = config - self._start_range = start_range - self._end_range = end_range - self._max_concurrency = max_concurrency - self._encoding = encoding - self._validate_content = validate_content - self._encryption_options = encryption_options or {} - self._progress_hook = kwargs.pop('progress_hook', None) - self._request_options = kwargs - self._response = None - self._location_mode = None - self._current_content: Union[str, bytes] = b'' - self._file_size = 0 - self._non_empty_ranges = None - self._encryption_data: Optional["_EncryptionData"] = None - - # The content download offset, after any processing (decryption), in bytes - self._download_offset = 0 - # The raw download offset, before processing (decryption), in bytes - self._raw_download_offset = 0 - # The offset the stream has been read to in bytes or chars depending on mode - self._read_offset = 0 - # The offset into current_content that has been consumed in bytes or chars depending on mode - self._current_content_offset = 0 - - self._text_mode: Optional[bool] = None - self._decoder: 
Optional["IncrementalDecoder"] = None - # Whether the current content is the first chunk of download content or not - self._first_chunk = True - self._download_start = self._start_range or 0 - - # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ - # but needs to be changed to cls in the request options. - self._request_options['cls'] = download_cls - - def __len__(self): - return self.size - - async def _get_encryption_data_request(self) -> None: - # Save current request cls - download_cls = self._request_options.pop('cls', None) - # Adjust cls for get_properties - self._request_options['cls'] = deserialize_blob_properties - - properties = cast("BlobProperties", await self._clients.blob.get_properties(**self._request_options)) - # This will return None if there is no encryption metadata or there are parsing errors. - # That is acceptable here, the proper error will be caught and surfaced when attempting - # to decrypt the blob. - self._encryption_data = parse_encryption_data(properties.metadata) - - # Restore cls for download - self._request_options['cls'] = download_cls - - async def _setup(self) -> None: - if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: - await self._get_encryption_data_request() - - # The service only provides transactional MD5s for chunks under 4MB. - # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first - # chunk so a transactional MD5 can be retrieved. - first_get_size = ( - self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size - ) - initial_request_start = self._start_range if self._start_range is not None else 0 - if self._end_range is not None and self._end_range - initial_request_start < first_get_size: - initial_request_end = self._end_range - else: - initial_request_end = initial_request_start + first_get_size - 1 - - # pylint: disable-next=attribute-defined-outside-init - self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, - initial_request_end, - self._end_range, - self._encryption_options, - self._encryption_data - ) - - self._response = await self._initial_request() - self.properties = cast("BlobProperties", self._response.properties) # type: ignore [attr-defined] - self.properties.name = self.name - self.properties.container = self.container - - # Set the content length to the download size instead of the size of the last range - self.properties.size = self.size - self.properties.content_range = (f"bytes {self._download_start}-" - f"{self._end_range if self._end_range is not None else self._file_size - 1}/" - f"{self._file_size}") - - # Overwrite the content MD5 as it is the MD5 for the last range instead - # of the stored MD5 - # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None # type: ignore [attr-defined] - - @property - def _download_complete(self): - if is_encryption_v2(self._encryption_data): - return self._download_offset >= self.size - return self._raw_download_offset >= self.size - - async def _initial_request(self): - range_header, range_validation = validate_and_format_range_headers( - self._initial_range[0], - self._initial_range[1], - start_range_required=False, - end_range_required=False, - check_content_md5=self._validate_content - ) - - retry_active = True - retry_total = 3 - while retry_active: - try: - location_mode, response = cast(Tuple[Optional[str], Any], await 
self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options - )) - - # Check the location we read from to ensure we use the same one - # for subsequent requests. - self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._file_size is None: - raise ValueError("Required Content-Range response header is missing or malformed.") - # Remove any extra encryption data size from blob size - self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) - - if self._end_range is not None and self._start_range is not None: - # Use the length unless it is over the end of the file - self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size - - except HttpResponseError as error: - if self._start_range is None and error.response and error.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options)) - except HttpResponseError as e: - process_storage_error(e) - - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) - - try: - if self.size == 0: - self._current_content = b"" - else: - self._current_content = await process_content( - response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) - retry_active = False - except (IncompleteReadError, HttpResponseError, DecodeError) as error: - retry_total -= 1 - if retry_total <= 0: - raise HttpResponseError(error, error=error) from error - await asyncio.sleep(1) - self._download_offset += len(self._current_content) - self._raw_download_offset += response.content_length - - # get page ranges to optimize downloading sparse page blob - if response.properties.blob_type == 'PageBlob': - try: - page_ranges = await self._clients.page_blob.get_page_ranges() - self._non_empty_ranges = get_page_ranges_result(page_ranges)[0] - except HttpResponseError: - pass - - if not self._download_complete and self._request_options.get("modified_access_conditions"): - self._request_options["modified_access_conditions"].if_match = response.properties.etag - - return response - - def chunks(self) -> AsyncIterator[bytes]: - """ - Iterate over chunks in the download stream. Note, the iterator returned will - iterate over the entire download content, regardless of any data that was - previously read. - - NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. - - :returns: An async iterator of the chunks in the download stream. - :rtype: AsyncIterator[bytes] - - .. admonition:: Example: - - .. literalinclude:: ../samples/blob_samples_hello_world_async.py - :start-after: [START download_a_blob_in_chunk] - :end-before: [END download_a_blob_in_chunk] - :language: python - :dedent: 16 - :caption: Download a blob using chunks(). 
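A usage sketch matching the `chunks()` docstring above; the blob name is a placeholder and `container` is assumed to be an open async `ContainerClient` inside a coroutine:

```python
downloader = await container.download_blob("big-file.bin")
total = 0
async for chunk in downloader.chunks():
    total += len(chunk)   # each chunk is a bytes object up to max_chunk_get_size
print(f"streamed {total} bytes")
```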
- """ - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") - if self._encoding: - warnings.warn("Encoding is ignored with chunks as only bytes are supported.") - - iter_downloader = None - # If we still have the first chunk buffered, use it. Otherwise, download all content again - if not self._first_chunk or not self._download_complete: - if self._first_chunk: - start = self._download_start + len(self._current_content) - current_progress = len(self._current_content) - else: - start = self._download_start - current_progress = 0 - - end = self._download_start + self.size - - iter_downloader = _AsyncChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=current_progress, - start_range=start, - end_range=end, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - **self._request_options - ) - - initial_content = self._current_content if self._first_chunk else b'' - return _AsyncChunkIterator( - size=self.size, - content=cast(bytes, initial_content), - downloader=iter_downloader, - chunk_size=self._config.max_chunk_get_size) - - @overload - async def read(self, size: int = -1) -> T: - ... - - @overload - async def read(self, *, chars: Optional[int] = None) -> T: - ... - - # pylint: disable-next=too-many-statements,too-many-branches - async def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: - """ - Read the specified bytes or chars from the stream. If `encoding` - was specified on `download_blob`, it is recommended to use the - chars parameter to read a specific number of chars to avoid decoding - errors. If size/chars is unspecified or negative all bytes will be read. - - :param int size: - The number of bytes to download from the stream. Leave unspecified - or set negative to download all bytes. - :keyword Optional[int] chars: - The number of chars to download from the stream. Leave unspecified - or set negative to download all chars. Note, this can only be used - when encoding is specified on `download_blob`. - :returns: - The requested data as bytes or a string if encoding was specified. If - the return value is empty, there is no more data to read. - :rtype: T - """ - if size > -1 and self._encoding: - warnings.warn( - "Size parameter specified with text encoding enabled. It is recommended to use chars " - "to read a specific number of characters instead." - ) - if size > -1 and chars is not None: - raise ValueError("Cannot specify both size and chars.") - if not self._encoding and chars is not None: - raise ValueError("Must specify encoding to read chars.") - if self._text_mode and size > -1: - raise ValueError("Stream has been partially read in text mode. Please use chars.") - if self._text_mode is False and chars is not None: - raise ValueError("Stream has been partially read in bytes mode. 
Please use size.") - - # Empty blob or already read to the end - if (size == 0 or chars == 0 or - (self._download_complete and self._current_content_offset >= len(self._current_content))): - return b'' if not self._encoding else '' # type: ignore [return-value] - - if not self._text_mode and chars is not None and self._encoding is not None: - self._text_mode = True - self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') - self._current_content = self._decoder.decode( - cast(bytes, self._current_content), final=self._download_complete) - elif self._text_mode is None: - self._text_mode = False - - output_stream: Union[BytesIO, StringIO] - if self._text_mode: - output_stream = StringIO() - size = chars if chars else sys.maxsize - else: - output_stream = BytesIO() - size = size if size > 0 else sys.maxsize - readall = size == sys.maxsize - count = 0 - - # Start by reading from current_content - start = self._current_content_offset - length = min(len(self._current_content) - self._current_content_offset, size - count) - read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] - - count += read - self._current_content_offset += read - self._read_offset += read - await self._check_and_report_progress() - - remaining = size - count - if remaining > 0 and not self._download_complete: - # Create a downloader than can download the rest of the file - start = self._download_start + self._download_offset - end = self._download_start + self.size - - parallel = self._max_concurrency > 1 - downloader = _AsyncChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=self._read_offset, - start_range=start, - end_range=end, - stream=output_stream, - parallel=parallel, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - progress_hook=self._progress_hook, - **self._request_options - ) - self._first_chunk = False - - # When reading all data, have the downloader read everything into the stream. - # Else, read one chunk at a time (using the downloader as an iterator) until - # the requested size is reached. 
- chunks_iter = downloader.get_chunk_offsets() - if readall and not self._text_mode: - running_futures: Any = [ - asyncio.ensure_future(downloader.process_chunk(d)) - for d in islice(chunks_iter, 0, self._max_concurrency) - ] - while running_futures: - # Wait for some download to finish before adding a new one - done, running_futures = await asyncio.wait( - running_futures, return_when=asyncio.FIRST_COMPLETED) - try: - for task in done: - task.result() - except HttpResponseError as error: - process_storage_error(error) - try: - for _ in range(0, len(done)): - next_chunk = next(chunks_iter) - running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) - except StopIteration: - break - - if running_futures: - # Wait for the remaining downloads to finish - done, _running_futures = await asyncio.wait(running_futures) - try: - for task in done: - task.result() - except HttpResponseError as error: - process_storage_error(error) - - self._complete_read() - - else: - while (chunk := next(chunks_iter, None)) is not None and remaining > 0: - chunk_data, content_length = await downloader.yield_chunk(chunk) - self._download_offset += len(chunk_data) - self._raw_download_offset += content_length - if self._text_mode and self._decoder is not None: - self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) - else: - self._current_content = chunk_data - - if remaining < len(self._current_content): - read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] - else: - read = output_stream.write(self._current_content) # type: ignore [arg-type] - - self._current_content_offset = read - self._read_offset += read - remaining -= read - await self._check_and_report_progress() - - data = output_stream.getvalue() - if not self._text_mode and self._encoding: - try: - # This is technically incorrect to do, but we have it for backwards compatibility. - data = cast(bytes, data).decode(self._encoding) - except UnicodeDecodeError: - warnings.warn( - "Encountered a decoding error while decoding blob data from a partial read. " - "Try using the `chars` keyword instead to read in text mode." - ) - raise - - return data # type: ignore [return-value] - - async def readall(self) -> T: - """ - Read the entire contents of this blob. - This operation is blocking until all data is downloaded. - - :returns: The requested data as bytes or a string if encoding was specified. - :rtype: T - """ - return await self.read() - - async def readinto(self, stream: IO[bytes]) -> int: - """Download the contents of this blob to a stream. - - :param IO[bytes] stream: - The stream to download to. This can be an open file-handle, - or any writable stream. The stream must be seekable if the download - uses more than one parallel connection. - :returns: The number of bytes read. - :rtype: int - """ - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. readinto is not supported in text mode.") - if self._encoding: - warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") - - # the stream must be seekable if parallel download is required - parallel = self._max_concurrency > 1 - if parallel: - error_message = "Target stream handle must be seekable." 
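The readall branch above keeps a fixed-size window of in-flight chunk downloads by pairing `asyncio.wait(..., return_when=FIRST_COMPLETED)` with a top-up loop. The same pattern in isolation, as a sketch where the `work` coroutine and `items` are hypothetical stand-ins for `process_chunk` and the chunk offsets:

```python
import asyncio
from itertools import islice

async def run_windowed(work, items, max_concurrency: int = 4) -> None:
    """Keep at most max_concurrency tasks in flight, topping up as tasks finish."""
    items = iter(items)
    running = {asyncio.ensure_future(work(i)) for i in islice(items, max_concurrency)}
    while running:
        done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            task.result()  # surface any failure from a finished task
        for item in islice(items, len(done)):
            running.add(asyncio.ensure_future(work(item)))
```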
- if sys.version_info >= (3,) and not stream.seekable(): - raise ValueError(error_message) - - try: - stream.seek(stream.tell()) - except (NotImplementedError, AttributeError) as exc: - raise ValueError(error_message) from exc - - # If some data has been streamed using `read`, only stream the remaining data - remaining_size = self.size - self._read_offset - # Already read to the end - if remaining_size <= 0: - return 0 - - # Write the current content to the user stream - current_remaining = len(self._current_content) - self._current_content_offset - start = self._current_content_offset - count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) - - self._current_content_offset += count - self._read_offset += count - if self._progress_hook: - await self._progress_hook(self._read_offset, self.size) - - # If all the data was already downloaded/buffered - if self._download_complete: - return remaining_size - - data_start = self._download_start + self._read_offset - data_end = self._download_start + self.size - - downloader = _AsyncChunkDownloader( - client=self._clients.blob, - non_empty_ranges=self._non_empty_ranges, - total_size=self.size, - chunk_size=self._config.max_chunk_get_size, - current_progress=self._read_offset, - start_range=data_start, - end_range=data_end, - stream=stream, - parallel=parallel, - validate_content=self._validate_content, - encryption_options=self._encryption_options, - encryption_data=self._encryption_data, - use_location=self._location_mode, - progress_hook=self._progress_hook, - **self._request_options - ) - - dl_tasks = downloader.get_chunk_offsets() - running_futures = { - asyncio.ensure_future(downloader.process_chunk(d)) - for d in islice(dl_tasks, 0, self._max_concurrency) - } - while running_futures: - # Wait for some download to finish before adding a new one - done, running_futures = await asyncio.wait( - running_futures, return_when=asyncio.FIRST_COMPLETED) - try: - for task in done: - task.result() - except HttpResponseError as error: - process_storage_error(error) - try: - for _ in range(0, len(done)): - next_chunk = next(dl_tasks) - running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) - except StopIteration: - break - - if running_futures: - # Wait for the remaining downloads to finish - done, _running_futures = await asyncio.wait(running_futures) - try: - for task in done: - task.result() - except HttpResponseError as error: - process_storage_error(error) - - self._complete_read() - return remaining_size - - def _complete_read(self): - """Adjusts all offsets to the end of the download.""" - self._download_offset = self.size - self._raw_download_offset = self.size - self._read_offset = self.size - self._current_content_offset = len(self._current_content) - - async def _check_and_report_progress(self): - """Reports progress if necessary.""" - # Only report progress at the end of each chunk and use download_offset to always report - # progress in terms of (approximate) byte count. - if self._progress_hook and self._current_content_offset == len(self._current_content): - await self._progress_hook(self._download_offset, self.size) - - async def content_as_bytes(self, max_concurrency=1): - """DEPRECATED: Download the contents of this file. - - This operation is blocking until all data is downloaded. - - This method is deprecated, use func:`readall` instead. - - :param int max_concurrency: - The number of parallel connections with which to download. - :returns: The contents of the file as bytes. 
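`readinto` streams into a caller-supplied handle, which must be seekable when more than one parallel connection is used. A minimal sketch writing a blob to a local file (the blob name and path are placeholders, `container` as above):

```python
downloader = await container.download_blob("big-file.bin", max_concurrency=4)
with open("/tmp/big-file.bin", "wb") as handle:
    bytes_read = await downloader.readinto(handle)
print(f"wrote {bytes_read} bytes")
```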
- :rtype: bytes - """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "content_as_bytes is not supported in text mode.") - - self._max_concurrency = max_concurrency - return await self.readall() - - async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """DEPRECATED: Download the contents of this blob, and decode as text. - - This operation is blocking until all data is downloaded. - - This method is deprecated, use :func:`readall` instead. - - :param int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Text encoding to decode the downloaded bytes. Default is UTF-8. - :returns: The content of the file as a str. - :rtype: str - """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "content_as_text is not supported in text mode.") - - self._max_concurrency = max_concurrency - self._encoding = encoding - return await self.readall() - - async def download_to_stream(self, stream, max_concurrency=1): - """DEPRECATED: Download the contents of this blob to a stream. - - This method is deprecated, use :func:`readinto` instead. - - :param IO[T] stream: - The stream to download to. This can be an open file-handle, - or any writable stream. The stream must be seekable if the download - uses more than one parallel connection. - :param int max_concurrency: - The number of parallel connections with which to download. - :returns: The properties of the downloaded blob. - :rtype: Any - """ - warnings.warn( - "download_to_stream is deprecated, use readinto instead", - DeprecationWarning - ) - if self._text_mode: - raise ValueError("Stream has been partially read in text mode. " - "download_to_stream is not supported in text mode.") - - self._max_concurrency = max_concurrency - await self.readinto(stream) - return self.properties diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_encryption_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_encryption_async.py deleted file mode 100644 index 97334d96da59..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_encryption_async.py +++ /dev/null @@ -1,72 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import inspect -import sys -from io import BytesIO -from typing import IO - -from .._encryption import _GCM_REGION_DATA_LENGTH, encrypt_data_v2 - - -class GCMBlobEncryptionStream: - """ - An async stream that performs AES-GCM encryption on the given data as - it's streamed. Data is read and encrypted in regions. The stream - will use the same encryption key and will generate a guaranteed unique - nonce for each encryption region. - """ - def __init__( - self, content_encryption_key: bytes, - data_stream: IO[bytes], - ) -> None: - """ - :param bytes content_encryption_key: The encryption key to use. - :param IO[bytes] data_stream: The data stream to read data from.
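Since `content_as_bytes`, `content_as_text`, and `download_to_stream` above are deprecated in favor of `readall` and `readinto`, a quick before/after sketch may help reviewers migrating call sites (names are placeholders):

```python
# Before (deprecated): decode as part of the helper call.
text = await downloader.content_as_text(encoding="utf-8")

# After: request decoding up front on download_blob, then use readall().
downloader = await container.download_blob("notes.txt", encoding="utf-8")
text = await downloader.readall()
```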
- """ - self.content_encryption_key = content_encryption_key - self.data_stream = data_stream - - self.offset = 0 - self.current = b'' - self.nonce_counter = 0 - - async def read(self, size: int = -1) -> bytes: - """ - Read data from the stream. Specify -1 to read all available data. - - :param int size: The amount of data to read. Defaults to -1 for all data. - :return: The bytes read. - :rtype: bytes - """ - result = BytesIO() - remaining = sys.maxsize if size == -1 else size - - while remaining > 0: - # Start by reading from current - if len(self.current) > 0: - read = min(remaining, len(self.current)) - result.write(self.current[:read]) - - self.current = self.current[read:] - self.offset += read - remaining -= read - - if remaining > 0: - # Read one region of data and encrypt it - data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) - if inspect.isawaitable(data): - data = await data - - if len(data) == 0: - # No more data to read - break - - self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) - # IMPORTANT: Must increment the nonce each time. - self.nonce_counter += 1 - - return result.getvalue() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py deleted file mode 100644 index 90987d3d111c..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py +++ /dev/null @@ -1,346 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method, docstring-keyword-should-match-keyword-only - -import uuid -from typing import Any, Optional, Union, TYPE_CHECKING - -from azure.core.exceptions import HttpResponseError -from azure.core.tracing.decorator_async import distributed_trace_async - -from .._shared.response_handlers import process_storage_error, return_response_headers -from .._serialize import get_modify_conditions - -if TYPE_CHECKING: - from azure.storage.blob.aio import BlobClient, ContainerClient - from datetime import datetime - - -class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword - """Creates a new BlobLeaseClient. - - This client provides lease operations on a BlobClient or ContainerClient. - :param client: The client of the blob or container to lease. - :type client: Union[BlobClient, ContainerClient] - :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be - specified in order to acquire a new lease, or break one. - :type lease_id: Optional[str] - """ - - id: str - """The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired.""" - etag: Optional[str] - """The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified.""" - last_modified: Optional["datetime"] - """The last modified timestamp of the lease currently being maintained. 
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py
deleted file mode 100644
index 90987d3d111c..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_lease_async.py
+++ /dev/null
@@ -1,346 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-# pylint: disable=invalid-overridden-method, docstring-keyword-should-match-keyword-only
-
-import uuid
-from typing import Any, Optional, Union, TYPE_CHECKING
-
-from azure.core.exceptions import HttpResponseError
-from azure.core.tracing.decorator_async import distributed_trace_async
-
-from .._shared.response_handlers import process_storage_error, return_response_headers
-from .._serialize import get_modify_conditions
-
-if TYPE_CHECKING:
- from azure.storage.blob.aio import BlobClient, ContainerClient
- from datetime import datetime
-
-
-class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword
- """Creates a new BlobLeaseClient.
-
- This client provides lease operations on a BlobClient or ContainerClient.
- :param client: The client of the blob or container to lease.
- :type client: Union[BlobClient, ContainerClient]
- :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be
- specified in order to acquire a new lease, or break one.
- :type lease_id: Optional[str]
- """
-
- id: str
- """The ID of the lease currently being maintained. This will be `None` if no
- lease has yet been acquired."""
- etag: Optional[str]
- """The ETag of the lease currently being maintained. This will be `None` if no
- lease has yet been acquired or modified."""
- last_modified: Optional["datetime"]
- """The last modified timestamp of the lease currently being maintained.
- This will be `None` if no lease has yet been acquired or modified."""
-
- def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs
- self, client: Union["BlobClient", "ContainerClient"],
- lease_id: Optional[str] = None
- ) -> None:
- self.id = lease_id or str(uuid.uuid4())
- self.last_modified = None
- self.etag = None
- if hasattr(client, 'blob_name'):
- self._client = client._client.blob
- elif hasattr(client, 'container_name'):
- self._client = client._client.container
- else:
- raise TypeError("Lease must use either BlobClient or ContainerClient.")
-
- def __enter__(self):
- raise TypeError("Async lease must use 'async with'.")
-
- def __exit__(self, *args):
- self.release()
-
- async def __aenter__(self):
- return self
-
- async def __aexit__(self, *args):
- await self.release()
-
- @distributed_trace_async
- async def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None:
- """Requests a new lease.
-
- If the container does not have an active lease, the Blob service creates a
- lease on the container and returns a new lease ID.
-
- :param int lease_duration:
- Specifies the duration of the lease, in seconds, or negative one
- (-1) for a lease that never expires. A non-infinite lease can be
- between 15 and 60 seconds. A lease duration cannot be changed
- using renew or change. Default is -1 (infinite lease).
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :rtype: None
- """
- mod_conditions = get_modify_conditions(kwargs)
- try:
- response: Any = await self._client.acquire_lease(
- timeout=kwargs.pop('timeout', None),
- duration=lease_duration,
- proposed_lease_id=self.id,
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- self.id = response.get('lease_id')
- self.last_modified = response.get('last_modified')
- self.etag = response.get('etag')
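Because `__enter__` raises above, this client is only usable as an async context manager. A short usage sketch (names illustrative), with the lease obtained via the blob client's `acquire_lease` convenience method rather than constructed directly:

```python
from azure.storage.blob.aio import BlobClient

async def update_under_lease(blob: BlobClient) -> None:
    # acquire_lease constructs a BlobLeaseClient and awaits acquire() for you.
    lease = await blob.acquire_lease(lease_duration=15)
    async with lease:  # __aexit__ releases the lease even on error
        await blob.set_blob_metadata({"state": "locked"}, lease=lease)
```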
-
- @distributed_trace_async
- async def renew(self, **kwargs: Any) -> None:
- """Renews the lease.
-
- The lease can be renewed if the lease ID specified in the
- lease client matches that associated with the container or blob. Note that
- the lease may be renewed even if it has expired as long as the container
- or blob has not been leased again since the expiration of that lease. When you
- renew a lease, the lease duration clock resets.
-
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: None
- """
- mod_conditions = get_modify_conditions(kwargs)
- try:
- response: Any = await self._client.renew_lease(
- lease_id=self.id,
- timeout=kwargs.pop('timeout', None),
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- self.etag = response.get('etag')
- self.id = response.get('lease_id')
- self.last_modified = response.get('last_modified')
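Since renewing resets the duration clock, a holder that needs the resource longer than the lease duration renews on a cadence shorter than `lease_duration`. A sketch (intervals illustrative):

```python
import asyncio

async def keep_lease_alive(lease, renew_every: float = 10.0) -> None:
    # Renew well inside the 15-60 second lease window so the lease never lapses.
    while True:
        await asyncio.sleep(renew_every)
        await lease.renew()  # refreshes lease.etag / lease.last_modified from the response
```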
-
- @distributed_trace_async
- async def release(self, **kwargs: Any) -> None:
- """Release the lease.
-
- The lease may be released if the client lease id specified matches
- that associated with the container or blob. Releasing the lease allows another client
- to immediately acquire the lease for the container or blob as soon as the release is complete.
-
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: None
- """
- mod_conditions = get_modify_conditions(kwargs)
- try:
- response: Any = await self._client.release_lease(
- lease_id=self.id,
- timeout=kwargs.pop('timeout', None),
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- self.etag = response.get('etag')
- self.id = response.get('lease_id')
- self.last_modified = response.get('last_modified')
-
- @distributed_trace_async
- async def change(self, proposed_lease_id: str, **kwargs: Any) -> None:
- """Change the lease ID of an active lease.
-
- :param str proposed_lease_id:
- Proposed lease ID, in a GUID string format. The Blob service returns 400
- (Invalid request) if the proposed lease ID is not in the correct format.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str etag:
- An ETag value, or the wildcard character (*). Used to check if the resource has changed,
- and act according to the condition specified by the `match_condition` parameter.
- :keyword ~azure.core.MatchConditions match_condition:
- The match condition to use upon the etag.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: None
- """
- mod_conditions = get_modify_conditions(kwargs)
- try:
- response: Any = await self._client.change_lease(
- lease_id=self.id,
- proposed_lease_id=proposed_lease_id,
- timeout=kwargs.pop('timeout', None),
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- self.etag = response.get('etag')
- self.id = response.get('lease_id')
- self.last_modified = response.get('last_modified')
-
- @distributed_trace_async
- async def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int:
- """Break the lease, if the container or blob has an active lease.
-
- Once a lease is broken, it cannot be renewed. Any authorized request can break the lease;
- the request is not required to specify a matching lease ID. When a lease
- is broken, the lease break period is allowed to elapse, during which time
- no lease operation except break and release can be performed on the container or blob.
- When a lease is successfully broken, the response indicates the interval
- in seconds until a new lease can be acquired.
-
- :param int lease_break_period:
- This is the proposed duration of seconds that the lease
- should continue before it is broken, between 0 and 60 seconds. This
- break period is only used if it is shorter than the time remaining
- on the lease. If longer, the time remaining on the lease is used.
- A new lease will not be available before the break period has
- expired, but the lease may be held for longer than the break
- period. If this header does not appear with a break
- operation, a fixed-duration lease breaks after the remaining lease
- period elapses, and an infinite lease breaks immediately.
- :keyword ~datetime.datetime if_modified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only
- if the resource has been modified since the specified time.
- :keyword ~datetime.datetime if_unmodified_since:
- A DateTime value. Azure expects the date value passed in to be UTC.
- If timezone is included, any non-UTC datetimes will be converted to UTC.
- If a date is passed in without timezone info, it is assumed to be UTC.
- Specify this header to perform the operation only if
- the resource has not been modified since the specified date/time.
- :keyword str if_tags_match_condition:
- Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
- e.g. ``\"\\\"tagname\\\"='my tag'\"``
-
- .. versionadded:: 12.4.0
-
- :keyword int timeout:
- Sets the server-side timeout for the operation in seconds. For more details see
- https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
- This value is not tracked or validated on the client. To configure client-side network timeouts
- see `here `__.
- :return: Approximate time remaining in the lease period, in seconds.
- :rtype: int
- """
- mod_conditions = get_modify_conditions(kwargs)
- try:
- response: Any = await self._client.break_lease(
- timeout=kwargs.pop('timeout', None),
- break_period=lease_break_period,
- modified_access_conditions=mod_conditions,
- cls=return_response_headers,
- **kwargs)
- except HttpResponseError as error:
- process_storage_error(error)
- return response.get('lease_time') # type: ignore
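Unlike the other lease operations, breaking needs no matching lease ID, so any authorized client can free a stuck resource. A sketch using the container form (client construction elided):

```python
from azure.storage.blob.aio import BlobLeaseClient, ContainerClient

async def force_release(container: ContainerClient) -> None:
    # The client's own ID does not need to match the active lease to break it.
    lease = BlobLeaseClient(container)
    seconds = await lease.break_lease(lease_break_period=10)
    print(f"Lease can be re-acquired in about {seconds}s")
```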
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_list_blobs_helper.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_list_blobs_helper.py
deleted file mode 100644
index 6a21e1ce7bc4..000000000000
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_list_blobs_helper.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# pylint: disable=too-many-lines
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# --------------------------------------------------------------------------
-
-from typing import Callable, List, Optional
-from urllib.parse import unquote
-
-from azure.core.async_paging import AsyncItemPaged, AsyncPageIterator
-from azure.core.exceptions import HttpResponseError
-
-from .._deserialize import (
- get_blob_properties_from_generated_code,
- load_many_xml_nodes,
- load_xml_int,
- load_xml_string
-)
-from .._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix
-from .._models import BlobProperties
-from .._shared.models import DictMixin
-from .._shared.response_handlers import (
- process_storage_error,
- return_context_and_deserialized,
- return_raw_deserialized
-)
-
-
-class BlobPropertiesPaged(AsyncPageIterator):
- """An Iterable of Blob properties."""
-
- service_endpoint: Optional[str]
- """The service URL."""
- prefix: Optional[str]
- """A blob name prefix being used to filter the list."""
- marker: Optional[str]
- """The continuation token of the current page of results."""
- results_per_page: Optional[int]
- """The maximum number of results retrieved per API call."""
- continuation_token: Optional[str]
- """The continuation token to retrieve the next page of results."""
- location_mode: Optional[str]
- """The location mode being used to list results.
The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - container: Optional[str] - """The container that the blobs are listed from.""" - delimiter: Optional[str] - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - - def __init__( - self, command: Callable, - container: Optional[str] = None, - prefix: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - delimiter: Optional[str] = None, - location_mode: Optional[str] = None, - ) -> None: - super(BlobPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.prefix = prefix - self.marker = None - self.results_per_page = results_per_page - self.container = container - self.delimiter = delimiter - self.current_page = None - self.location_mode = location_mode - - async def _get_next_cb(self, continuation_token): - try: - return await self._command( - prefix=self.prefix, - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - async def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.service_endpoint - self.prefix = self._response.prefix - self.marker = self._response.marker - self.results_per_page = self._response.max_results - self.container = self._response.container_name - self.current_page = [self._build_item(item) for item in self._response.segment.blob_items] - - return self._response.next_marker or None, self.current_page - - def _build_item(self, item): - if isinstance(item, BlobProperties): - return item - if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container # type: ignore [assignment] - return blob - return item - - -class BlobNamesPaged(AsyncPageIterator): - """An Iterable of Blob names.""" - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of blobs to retrieve per call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. 
The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - container: Optional[str] - """The container that the blobs are listed from.""" - delimiter: Optional[str] - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - - def __init__( - self, command: Callable, - container: Optional[str] = None, - prefix: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - location_mode: Optional[str] = None - ) -> None: - super(BlobNamesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.prefix = prefix - self.marker = None - self.results_per_page = results_per_page - self.container = container - self.current_page = None - self.location_mode = location_mode - - async def _get_next_cb(self, continuation_token): - try: - return await self._command( - prefix=self.prefix, - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_raw_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - async def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.get('ServiceEndpoint') - self.prefix = load_xml_string(self._response, 'Prefix') - self.marker = load_xml_string(self._response, 'Marker') - self.results_per_page = load_xml_int(self._response, 'MaxResults') - self.container = self._response.get('ContainerName') - - blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') - self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] - - next_marker = load_xml_string(self._response, 'NextMarker') - return next_marker or None, self.current_page - - -class BlobPrefix(AsyncItemPaged, DictMixin): - """An Iterable of Blob properties. - - Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory.""" - - name: str - """The prefix, or "directory name" of the blob.""" - service_endpoint: Optional[str] - """The service URL.""" - prefix: str - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - next_marker: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: str - """The location mode being used to list results. 
The available - options include "primary" and "secondary".""" - current_page: Optional[List[BlobProperties]] - """The current page of listed results.""" - delimiter: str - """A delimiting character used for hierarchy listing.""" - command: Callable - """Function to retrieve the next page of items.""" - container: str - """The name of the container.""" - - def __init__(self, *args, **kwargs): - super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) - self.name = kwargs.get('prefix') - self.prefix = kwargs.get('prefix') - self.results_per_page = kwargs.get('results_per_page') - self.container = kwargs.get('container') - self.delimiter = kwargs.get('delimiter') - self.location_mode = kwargs.get('location_mode') - - -class BlobPrefixPaged(BlobPropertiesPaged): - def __init__(self, *args, **kwargs): - super(BlobPrefixPaged, self).__init__(*args, **kwargs) - self.name = self.prefix - - async def _extract_data_cb(self, get_next_return): - continuation_token, _ = await super(BlobPrefixPaged, self)._extract_data_cb(get_next_return) - self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items - self.current_page = [self._build_item(item) for item in self.current_page] - self.delimiter = self._response.delimiter - - return continuation_token, self.current_page - - def _build_item(self, item): - item = super(BlobPrefixPaged, self)._build_item(item) - if isinstance(item, GenBlobPrefix): - if item.name.encoded: - name = unquote(item.name.content) - else: - name = item.name.content - return BlobPrefix( - self._command, - container=self.container, - prefix=name, - results_per_page=self.results_per_page, - location_mode=self.location_mode) - return item diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py deleted file mode 100644 index 2ebed220b5f6..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py +++ /dev/null @@ -1,200 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -# pylint: disable=too-few-public-methods, too-many-instance-attributes -# pylint: disable=super-init-not-called, too-many-lines - -from typing import Callable, List, Optional, TYPE_CHECKING - -from azure.core.async_paging import AsyncPageIterator -from azure.core.exceptions import HttpResponseError - -from .._deserialize import parse_tags -from .._generated.models import FilterBlobItem -from .._models import ContainerProperties, FilteredBlob, parse_page_list -from .._shared.response_handlers import process_storage_error, return_context_and_deserialized - -if TYPE_CHECKING: - from .._models import BlobProperties - - -class ContainerPropertiesPaged(AsyncPageIterator): - """An Iterable of Container properties. - - :param Callable command: Function to retrieve the next page of items. - :param Optional[str] prefix: Filters the results to return only containers whose names - begin with the specified prefix. - :param Optional[int] results_per_page: The maximum number of container names to retrieve per - call. - :param Optional[str] continuation_token: An opaque continuation token. 
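These prefix pagers are what `ContainerClient.walk_blobs` yields when a delimiter is supplied; each `BlobPrefix` is a virtual directory that can itself be iterated. A usage sketch (delimiter illustrative; the attribute check stands in for an isinstance test, since the async `BlobPrefix` lives in this internal module):

```python
from azure.storage.blob.aio import ContainerClient

async def print_tree(container: ContainerClient) -> None:
    async for item in container.walk_blobs(delimiter="/"):
        # BlobPrefix entries expose .prefix; concrete blobs are BlobProperties.
        if hasattr(item, "prefix"):
            print("dir: ", item.prefix)
        else:
            print("blob:", item.name)
```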
- """ - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A container name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. The available - options include "primary" and "secondary".""" - current_page: List[ContainerProperties] - """The current page of listed results.""" - - def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): - super(ContainerPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.prefix = prefix - self.marker = None - self.results_per_page = results_per_page - self.location_mode = None - self.current_page = [] - - async def _get_next_cb(self, continuation_token): - try: - return await self._command( - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - async def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.service_endpoint - self.prefix = self._response.prefix - self.marker = self._response.marker - self.results_per_page = self._response.max_results - self.current_page = [self._build_item(item) for item in self._response.container_items] - - return self._response.next_marker or None, self.current_page - - @staticmethod - def _build_item(item): - return ContainerProperties._from_generated(item) # pylint: disable=protected-access - - -class FilteredBlobPaged(AsyncPageIterator): - """An Iterable of Blob properties. - - :param Callable command: Function to retrieve the next page of items. - :param Optional[str] container: The name of the container. - :param Optional[int] results_per_page: The maximum number of blobs to retrieve per - call. - :param Optional[str] continuation_token: An opaque continuation token. - :param Optional[str] location_mode: - Specifies the location the request should be sent to. This mode only applies for RA-GRS accounts - which allow secondary read access. Options include 'primary' or 'secondary'. - """ - - service_endpoint: Optional[str] - """The service URL.""" - prefix: Optional[str] - """A blob name prefix being used to filter the list.""" - marker: Optional[str] - """The continuation token of the current page of results.""" - results_per_page: Optional[int] - """The maximum number of results retrieved per API call.""" - continuation_token: Optional[str] - """The continuation token to retrieve the next page of results.""" - location_mode: Optional[str] - """The location mode being used to list results. 
The available - options include "primary" and "secondary".""" - current_page: Optional[List["BlobProperties"]] - """The current page of listed results.""" - container: Optional[str] - """The container that the blobs are listed from.""" - - def __init__( - self, command: Callable, - container: Optional[str] = None, - results_per_page: Optional[int] = None, - continuation_token: Optional[str] = None, - location_mode: Optional[str] = None - ) -> None: - super(FilteredBlobPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.service_endpoint = None - self.marker = continuation_token - self.results_per_page = results_per_page - self.container = container - self.current_page = None - self.location_mode = location_mode - - async def _get_next_cb(self, continuation_token): - try: - return await self._command( - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - async def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.service_endpoint = self._response.service_endpoint - self.marker = self._response.next_marker - self.current_page = [self._build_item(item) for item in self._response.blobs] - - return self._response.next_marker or None, self.current_page - - @staticmethod - def _build_item(item): - if isinstance(item, FilterBlobItem): - tags = parse_tags(item.tags) - blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) - return blob - return item - - -class PageRangePaged(AsyncPageIterator): - def __init__(self, command, results_per_page=None, continuation_token=None): - super(PageRangePaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.results_per_page = results_per_page - self.location_mode = None - self.current_page = [] - - async def _get_next_cb(self, continuation_token): - try: - return await self._command( - marker=continuation_token or None, - maxresults=self.results_per_page, - cls=return_context_and_deserialized, - use_location=self.location_mode) - except HttpResponseError as error: - process_storage_error(error) - - async def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return - self.current_page = self._build_page(self._response) - - return self._response.next_marker or None, self.current_page - - @staticmethod - def _build_page(response): - if not response: - raise StopIteration - - return parse_page_list(response) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_patch.py similarity index 69% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_patch.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/aio/_patch.py index 71dde502c70f..f7dd32510333 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_patch.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_patch.py @@ -2,19 +2,13 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ - - """Customize generated code here. 
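All of the deleted pagers above implement the same azure-core contract: `get_next` fetches one raw page for a continuation token, and `extract_data` turns it into a `(next_token, items)` pair, with a `None` token ending iteration. A self-contained toy sketch of that contract (no service calls):

```python
from typing import List, Optional, Tuple

from azure.core.async_paging import AsyncItemPaged, AsyncPageIterator

class NumberPager(AsyncPageIterator):
    """Toy pager: 'fetches' integers three at a time, stopping after 9."""

    def __init__(self, continuation_token: Optional[str] = None) -> None:
        super().__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or "",
        )

    async def _get_next_cb(self, continuation_token: str) -> Tuple[int, List[int]]:
        start = int(continuation_token or 0)  # a real pager issues a request here
        return start, list(range(start, start + 3))

    async def _extract_data_cb(
        self, response: Tuple[int, List[int]]
    ) -> Tuple[Optional[str], List[int]]:
        start, items = response
        next_token = str(start + 3) if start + 3 < 9 else None  # None ends paging
        return next_token, items

async def demo() -> None:
    # AsyncItemPaged flattens the pages into one async iterator of items.
    async for n in AsyncItemPaged(page_iterator_class=NumberPager):
        print(n)  # 0 through 8
```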
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports +from typing import List - from typing import List -__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_upload_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_upload_helpers.py deleted file mode 100644 index 794beee36e3b..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_upload_helpers.py +++ /dev/null @@ -1,334 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import inspect -from io import SEEK_SET, UnsupportedOperation -from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING - -from azure.core.exceptions import HttpResponseError, ResourceModifiedError - -from ._encryption_async import GCMBlobEncryptionStream -from .._encryption import ( - encrypt_blob, - get_adjusted_upload_size, - get_blob_encryptor_and_padder, - generate_blob_encryption_data, - _ENCRYPTION_PROTOCOL_V1, - _ENCRYPTION_PROTOCOL_V2 -) -from .._generated.models import ( - AppendPositionAccessConditions, - BlockLookupList, - ModifiedAccessConditions -) -from .._shared.response_handlers import process_storage_error, return_response_headers -from .._shared.uploads_async import ( - AppendBlobChunkUploader, - BlockBlobChunkUploader, - PageBlobChunkUploader, - upload_data_chunks, - upload_substream_blocks -) -from .._upload_helpers import _any_conditions, _convert_mod_error - -if TYPE_CHECKING: - from .._generated.aio.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations - from .._shared.models import StorageConfiguration - BlobLeaseClient = TypeVar("BlobLeaseClient") - - -async def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements - client: "BlockBlobOperations", - stream: IO, - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - validate_content: bool, - max_concurrency: Optional[int], - length: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if not overwrite and not _any_conditions(**kwargs): - kwargs['modified_access_conditions'].if_none_match = '*' - adjusted_count = length - if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) - blob_headers = kwargs.pop('blob_headers', None) - tier = kwargs.pop('standard_blob_tier', None) - blob_tags_string = kwargs.pop('blob_tags_string', None) - - immutability_policy = kwargs.pop('immutability_policy', None) - immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time - immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode - legal_hold = kwargs.pop('legal_hold', None) - progress_hook = kwargs.pop('progress_hook', None) - - # Do single put if the size is smaller than 
config.max_single_put_size - if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): - data = stream.read(length or -1) - if inspect.isawaitable(data): - data = await data - if not isinstance(data, bytes): - raise TypeError('Blob data should be of type bytes.') - - if encryption_options.get('key'): - if not isinstance(data, bytes): - raise TypeError('Blob data should be of type bytes.') - encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_data - - response = cast(Dict[str, Any], await client.upload( - body=data, # type: ignore [arg-type] - content_length=adjusted_count, - blob_http_headers=blob_headers, - headers=headers, - cls=return_response_headers, - validate_content=validate_content, - data_stream_total=adjusted_count, - upload_stream_current=0, - tier=tier.value if tier else None, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - **kwargs)) - - if progress_hook: - await progress_hook(adjusted_count, adjusted_count) - - return response - - use_original_upload_path = blob_settings.use_byte_buffer or \ - validate_content or encryption_options.get('required') or \ - blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ - hasattr(stream, 'seekable') and not stream.seekable() or \ - not hasattr(stream, 'seek') or not hasattr(stream, 'tell') - - if use_original_upload_path: - total_size = length - encryptor, padder = None, None - if encryption_options and encryption_options.get('key'): - cek, iv, encryption_metadata = generate_blob_encryption_data( - encryption_options['key'], - encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_metadata - - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: - encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) - - # Adjust total_size for encryption V2 - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: - # Adjust total_size for encryption V2 - total_size = adjusted_count - # V2 wraps the data stream with an encryption stream - if cek is None: - raise ValueError("Generate encryption metadata failed. 
'cek' is None.") - stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] - - block_ids = await upload_data_chunks( - service=client, - uploader_class=BlockBlobChunkUploader, - total_size=total_size, - chunk_size=blob_settings.max_block_size, - max_concurrency=max_concurrency, - stream=stream, - validate_content=validate_content, - progress_hook=progress_hook, - encryptor=encryptor, - padder=padder, - headers=headers, - **kwargs - ) - else: - block_ids = await upload_substream_blocks( - service=client, - uploader_class=BlockBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - max_concurrency=max_concurrency, - stream=stream, - validate_content=validate_content, - progress_hook=progress_hook, - headers=headers, - **kwargs - ) - - block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) - block_lookup.latest = block_ids - return cast(Dict[str, Any], await client.commit_block_list( - block_lookup, - blob_http_headers=blob_headers, - cls=return_response_headers, - validate_content=validate_content, - headers=headers, - tier=tier.value if tier else None, - blob_tags_string=blob_tags_string, - immutability_policy_expiry=immutability_policy_expiry, - immutability_policy_mode=immutability_policy_mode, - legal_hold=legal_hold, - **kwargs)) - except HttpResponseError as error: - try: - process_storage_error(error) - except ResourceModifiedError as mod_error: - if not overwrite: - _convert_mod_error(mod_error) - raise - - -async def upload_page_blob( - client: "PageBlobOperations", - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - stream: IO, - length: Optional[int] = None, - validate_content: Optional[bool] = None, - max_concurrency: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if not overwrite and not _any_conditions(**kwargs): - kwargs['modified_access_conditions'].if_none_match = '*' - if length is None or length < 0: - raise ValueError("A content length must be specified for a Page Blob.") - if length % 512 != 0: - raise ValueError(f"Invalid page blob size: {length}. 
" - "The size must be aligned to a 512-byte boundary.") - tier = None - if kwargs.get('premium_page_blob_tier'): - premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') - try: - tier = premium_page_blob_tier.value - except AttributeError: - tier = premium_page_blob_tier - - if encryption_options and encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data( - encryption_options['key'], - encryption_options['version']) - headers['x-ms-meta-encryptiondata'] = encryption_data - - blob_tags_string = kwargs.pop('blob_tags_string', None) - progress_hook = kwargs.pop('progress_hook', None) - - response = cast(Dict[str, Any], await client.create( - content_length=0, - blob_content_length=length, - blob_sequence_number=None, # type: ignore [arg-type] - blob_http_headers=kwargs.pop('blob_headers', None), - blob_tags_string=blob_tags_string, - tier=tier, - cls=return_response_headers, - headers=headers, - **kwargs)) - if length == 0: - return cast(Dict[str, Any], response) - - if encryption_options and encryption_options.get('key'): - if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: - encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - - kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return cast(Dict[str, Any], await upload_data_chunks( - service=client, - uploader_class=PageBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_page_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - - except HttpResponseError as error: - try: - process_storage_error(error) - except ResourceModifiedError as mod_error: - if not overwrite: - _convert_mod_error(mod_error) - raise - - -async def upload_append_blob( # pylint: disable=unused-argument - client: "AppendBlobOperations", - overwrite: bool, - encryption_options: Dict[str, Any], - blob_settings: "StorageConfiguration", - headers: Dict[str, Any], - stream: IO, - length: Optional[int] = None, - validate_content: Optional[bool] = None, - max_concurrency: Optional[int] = None, - **kwargs: Any -) -> Dict[str, Any]: - try: - if length == 0: - return {} - blob_headers = kwargs.pop('blob_headers', None) - append_conditions = AppendPositionAccessConditions( - max_size=kwargs.pop('maxsize_condition', None), - append_position=None) - blob_tags_string = kwargs.pop('blob_tags_string', None) - progress_hook = kwargs.pop('progress_hook', None) - - try: - if overwrite: - await client.create( - content_length=0, - blob_http_headers=blob_headers, - headers=headers, - blob_tags_string=blob_tags_string, - **kwargs) - return cast(Dict[str, Any], await upload_data_chunks( - service=client, - uploader_class=AppendBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - append_position_access_conditions=append_conditions, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - except HttpResponseError as error: - if error.response.status_code != 404: # type: ignore [union-attr] - raise - # rewind the request body if it is a stream - if hasattr(stream, 'read'): - try: - # attempt to rewind the body to the initial position - stream.seek(0, SEEK_SET) - except UnsupportedOperation as exc: - # if body is not seekable, then retry would not work - raise error from exc - 
await client.create( - content_length=0, - blob_http_headers=blob_headers, - headers=headers, - blob_tags_string=blob_tags_string, - **kwargs) - return cast(Dict[str, Any], await upload_data_chunks( - service=client, - uploader_class=AppendBlobChunkUploader, - total_size=length, - chunk_size=blob_settings.max_block_size, - stream=stream, - max_concurrency=max_concurrency, - validate_content=validate_content, - append_position_access_conditions=append_conditions, - progress_hook=progress_hook, - headers=headers, - **kwargs)) - except HttpResponseError as error: - process_storage_error(error) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/__init__.py similarity index 67% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/__init__.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/__init__.py index 1be05c7aa9a7..9bb9f47cf51b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/__init__.py @@ -2,16 +2,16 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._service_operations import ServiceOperations -from ._container_operations import ContainerOperations -from ._blob_operations import BlobOperations -from ._page_blob_operations import PageBlobOperations -from ._append_blob_operations import AppendBlobOperations -from ._block_blob_operations import BlockBlobOperations +from ._operations import ServiceOperations +from ._operations import ContainerOperations +from ._operations import BlobOperations +from ._operations import PageBlobOperations +from ._operations import AppendBlobOperations +from ._operations import BlockBlobOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_operations.py new file mode 100644 index 000000000000..8e09a2844603 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_operations.py @@ -0,0 +1,10151 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +from io import IOBase +import json +import sys +from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload + +from azure.core import MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... import models as _models +from ..._model_base import SdkJSONEncoder, _deserialize +from ...operations._operations import ( + build_append_blob_append_block_from_url_request, + build_append_blob_append_block_request, + build_append_blob_create_request, + build_append_blob_seal_request, + build_blob_abort_copy_from_url_request, + build_blob_acquire_lease_request, + build_blob_break_lease_request, + build_blob_change_lease_request, + build_blob_copy_from_url_request, + build_blob_create_snapshot_request, + build_blob_delete_immutability_policy_request, + build_blob_delete_request, + build_blob_download_request, + build_blob_get_account_info_request, + build_blob_get_properties_request, + build_blob_get_tags_request, + build_blob_query_request, + build_blob_release_lease_request, + build_blob_renew_lease_request, + build_blob_set_expiry_request, + build_blob_set_http_headers_request, + build_blob_set_immutability_policy_request, + build_blob_set_legal_hold_request, + build_blob_set_metadata_request, + build_blob_set_tags_request, + build_blob_set_tier_request, + build_blob_start_copy_from_url_request, + build_blob_undelete_request, + build_block_blob_commit_block_list_request, + build_block_blob_get_block_list_request, + build_block_blob_put_blob_from_url_request, + build_block_blob_stage_block_from_url_request, + build_block_blob_stage_block_request, + build_block_blob_upload_request, + build_container_acquire_lease_request, + build_container_break_lease_request, + build_container_change_lease_request, + build_container_create_request, + build_container_delete_request, + build_container_filter_blobs_request, + build_container_get_access_policy_request, + build_container_get_account_info_request, + build_container_get_properties_request, + build_container_list_blob_flat_segment_request, + build_container_list_blob_hierarchy_segment_request, + build_container_release_lease_request, + build_container_rename_request, + build_container_renew_lease_request, + build_container_set_access_policy_request, + build_container_set_metadata_request, + build_container_submit_batch_request, + build_container_undelete_request, + build_page_blob_clear_pages_request, + build_page_blob_copy_incremental_request, + build_page_blob_create_request, + build_page_blob_get_page_ranges_diff_request, + build_page_blob_get_page_ranges_request, + build_page_blob_resize_request, + build_page_blob_update_sequence_number_request, + build_page_blob_upload_pages_from_url_request, + build_page_blob_upload_pages_request, + build_service_filter_blobs_request, + build_service_get_account_info_request, + build_service_get_properties_request, + build_service_get_statistics_request, + build_service_get_user_delegation_key_request, + build_service_list_containers_segment_request, 
+ build_service_set_properties_request,
+ build_service_submit_batch_request,
+)
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class ServiceOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.blob.aio.BlobClient`'s
+ :attr:`service` attribute.
+ """
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @overload
+ async def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ body: _models.StorageServiceProperties,
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ """Sets the properties of a storage account's Blob service, including properties for Storage
+ Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param body: The storage service properties that specify the analytics and CORS rules to set
+ on the Blob service. Required.
+ :type body: ~azure.storage.blob.models.StorageServiceProperties
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ body: JSON,
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ """Sets the properties of a storage account's Blob service, including properties for Storage
+ Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param body: The storage service properties that specify the analytics and CORS rules to set
+ on the Blob service. Required.
+ :type body: JSON
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ body: IO[bytes],
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ """Sets the properties of a storage account's Blob service, including properties for Storage
+ Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param body: The storage service properties that specify the analytics and CORS rules to set
+ on the Blob service. Required.
+ :type body: IO[bytes]
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ body: Union[_models.StorageServiceProperties, JSON, IO[bytes]],
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> None:
+ """Sets the properties of a storage account's Blob service, including properties for Storage
+ Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param body: The storage service properties that specify the analytics and CORS rules to set
+ on the Blob service. Is one of the following types: StorageServiceProperties, JSON, IO[bytes]
+ Required.
+ :type body: ~azure.storage.blob.models.StorageServiceProperties or JSON or IO[bytes]
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_service_set_properties_request( + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_properties( + self, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + """Retrieves properties of a storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: StorageServiceProperties. 
The StorageServiceProperties is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_service_get_properties_request( + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.StorageServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_statistics( + self, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + """Retrieves statistics related to replication for the Blob service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: StorageServiceStats. 
The StorageServiceStats is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_service_get_statistics_request( + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.StorageServiceStats, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def list_containers_segment( + self, + *, + version: str, + prefix: Optional[str] = None, + timeout: Optional[int] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + """The List Containers Segment operation returns a list of the containers under the specified + account. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :paramtype prefix: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. 
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :return: ListContainersSegmentResponse. The ListContainersSegmentResponse is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_service_list_containers_segment_request( + version=version, + prefix=prefix, + timeout=timeout, + marker=marker, + maxresults=maxresults, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListContainersSegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def get_user_delegation_key( + self, + body: _models.KeyInfo, + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: ~azure.storage.blob.models.KeyInfo + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_user_delegation_key( + self, + body: JSON, + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_user_delegation_key( + self, + body: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_user_delegation_key( + self, + body: Union[_models.KeyInfo, JSON, IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Is one of the following types: KeyInfo, JSON, + IO[bytes] Required. 
+ :type body: ~azure.storage.blob.models.KeyInfo or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_service_get_user_delegation_key_request( + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.UserDelegationKey, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_service_get_account_info_request( + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def submit_batch( # pylint: disable=inconsistent-return-statements + self, *, content_length: int, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> None: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_service_submit_batch_request(
+ content_length=content_length,
+ version=version,
+ timeout=timeout,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def filter_blobs(
+ self,
+ *,
+ version: str,
+ where: Optional[str] = None,
+ include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.FilterBlobSegment:
+ """The Filter Blobs operation enables callers to list blobs across all containers whose tags match
+ a given search expression.
+
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword where: Filters the results to return only blobs whose tags match the
+ specified expression. Default value is None.
+ :paramtype where: str
+ :keyword include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :paramtype include: list[str or ~azure.storage.blob.models.FilterBlobsIncludes]
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword marker: A string value that identifies the portion of the list of containers to be
+ returned with the next listing operation. The operation returns the NextMarker value within the
+ response body if the listing operation did not return all containers remaining to be listed
+ with the current page. The NextMarker value can be used as the value for the marker parameter
+ in a subsequent call to request the next page of list items. The marker value is opaque to the
+ client. Default value is None.
+ :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :return: FilterBlobSegment. The FilterBlobSegment is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_service_filter_blobs_request( + version=version, + where=where, + include=include, + timeout=timeout, + marker=marker, + maxresults=maxresults, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.FilterBlobSegment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.BlobClient`'s + :attr:`container` attribute. 
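+
+ A minimal usage sketch (illustrative only; the endpoint, credential, and
+ API version values below are placeholders rather than part of the generated
+ surface)::
+
+     from azure.storage.blob.aio import BlobClient
+
+     client = BlobClient(endpoint, credential=credential)
+     await client.container.create("my-container", version="2025-01-05")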
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + access: Optional[Union[str, _models.PublicAccessType]] = None, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Creates a new container under the specified account. If the container with the same name + already exists, the operation fails. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword access: Specifies whether data in the container may be accessed publicly and the level + of access. Possible values include: 'container', 'blob'. Known values are: "blob" and + "container". Default value is None. + :paramtype access: str or ~azure.storage.blob.models.PublicAccessType + :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the + default encryption scope to set on the container and use for all future writes. Default value + is None. + :paramtype default_encryption_scope: str + :keyword deny_encryption_scope_override: Optional. Version 2019-07-07 and later. Specifies + that the request will fail if the target container does not have the same encryption scope as + the source container. Default value is None. + :paramtype deny_encryption_scope_override: bool + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_create_request( + container_name=container_name, + version=version, + access=access, + default_encryption_scope=default_encryption_scope, + deny_encryption_scope_override=deny_encryption_scope_override, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. 
+ :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_get_properties_request( + container_name=container_name, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: 
Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """The operation marks the specified container for deletion. The container and any blobs
+ contained within it are later deleted during garbage collection.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_container_delete_request(
+ container_name=container_name,
+ version=version,
+ timeout=timeout,
+ lease_id=lease_id,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
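+
+ # A hedged usage sketch (editorial note, not generated code): conditionally
+ # deleting a container through the async client. The ``client`` variable, the
+ # API version value, and ``known_last_modified`` are illustrative placeholders.
+ #
+ #     await client.container.delete(
+ #         "my-container",
+ #         version="2025-01-05",
+ #         if_unmodified_since=known_last_modified,
+ #     )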
+
+ @distributed_trace_async
+ async def set_metadata( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ lease_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """The operation sets one or more user-defined name-value pairs for the specified container.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_container_set_metadata_request(
+ container_name=container_name,
+ version=version,
+ timeout=timeout,
+ if_modified_since=if_modified_since,
+ lease_id=lease_id,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def get_access_policy(
+ self,
+ container_name: str,
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> List[_models.SignedIdentifier]:
+ """gets the permissions for the specified container. The permissions indicate whether container
+ data may be accessed publicly.
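+
+ For example, a minimal sketch (the ``client`` variable and the API version
+ value shown are illustrative placeholders, not part of the generated code)::
+
+     identifiers = await client.container.get_access_policy(
+         "my-container", version="2025-01-05"
+     )
+     for identifier in identifiers:
+         print(identifier.id)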
+ + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :return: list of SignedIdentifier + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + + _request = build_container_get_access_policy_request( + container_name=container_name, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.SignedIdentifier], response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: List[_models.SignedIdentifier], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + 
if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Required. + :type acl: list[~azure.storage.blob.models.SignedIdentifier] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Required. + :type acl: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: Union[List[_models.SignedIdentifier], IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Is either a [SignedIdentifier] type or a + IO[bytes] type. Required. + :type acl: list[~azure.storage.blob.models.SignedIdentifier] or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. 
+ :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(acl, (IOBase, bytes)): + _content = acl + else: + _content = json.dumps(acl, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_container_set_access_policy_request( + container_name=container_name, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + lease_id=lease_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def undelete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Restores a previously-deleted container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name + of the deleted container to restore. Default value is None. + :paramtype deleted_container_name: str + :keyword deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. Default value is None. 
+ :paramtype deleted_container_version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_undelete_request( + container_name=container_name, + version=version, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def rename( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + source_container_name: str, + version: str, + source_lease_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Renames an existing container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword source_container_name: Required. Specifies the name of the container to rename. + Required. + :paramtype source_container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword source_lease_id: A lease ID for the source path. If specified, the source path must + have an active lease and the lease ID must match. Default value is None. + :paramtype source_lease_id: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_rename_request( + container_name=container_name, + source_container_name=source_container_name, + version=version, + source_lease_id=source_lease_id, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def submit_batch( + self, + container_name: str, + body: bytes, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> bytes: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param container_name: The name of the container. Required. + :type container_name: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[bytes] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_container_submit_batch_request( + container_name=container_name, + content_length=content_length, + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def filter_blobs( + self, + container_name: str, + *, + version: str, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + where: Optional[str] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. 
The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :keyword where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :paramtype where: str + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or ~azure.storage.blob.models.FilterBlobsIncludes] + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: FilterBlobSegment. The FilterBlobSegment is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_container_filter_blobs_request( + container_name=container_name, + version=version, + marker=marker, + maxresults=maxresults, + where=where, + include=include, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.FilterBlobSegment,
response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Required. + :paramtype duration: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword proposed_lease_id: Optional. The proposed lease ID for the container. Default value + is None. + :paramtype proposed_lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_acquire_lease_request( + container_name=container_name, + duration=duration, + version=version, + timeout=timeout, + proposed_lease_id=proposed_lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword lease_id: Required. A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_release_lease_request( + container_name=container_name, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword lease_id: Required. A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_renew_lease_request( + container_name=container_name, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + break_period: Optional[int] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. 
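+
+        A minimal usage sketch (illustrative only, not generator output; assumes an
+        authenticated ``ContainerOperations`` instance named ``ops`` and a placeholder
+        service version string)::
+
+            await ops.break_lease(
+                "my-container",
+                version="2021-12-02",  # placeholder service version
+                break_period=10,       # optional: break within at most 10 seconds
+            )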
+ + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword break_period: For a break operation, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. This break period is only used if it is + shorter than the time remaining on the lease. If longer, the time remaining on the lease is + used. A new lease will not be available before the break period has expired, but the lease may + be held for longer than the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease period elapses, and an + infinite lease breaks immediately. Default value is None. + :paramtype break_period: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_break_lease_request( + container_name=container_name, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + break_period=break_period, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = 
self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + lease_id: str, + proposed_lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword lease_id: Required. A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword proposed_lease_id: Required. The proposed lease ID for the container. Required. + :paramtype proposed_lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_change_lease_request( + container_name=container_name, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def list_blob_flat_segment( + self, + container_name: str, + *, + version: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.ListBlobsFlatSegmentResponse: + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :paramtype prefix: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. 
The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or ~azure.storage.blob.models.ListBlobsIncludes] + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: ListBlobsFlatSegmentResponse. The ListBlobsFlatSegmentResponse is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) + + _request = build_container_list_blob_flat_segment_request( + container_name=container_name, + version=version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListBlobsFlatSegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + 
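+        # Commentary added for clarity (not generator output): when a custom `cls`
+        # callback is supplied it shapes the return value above; otherwise the
+        # deserialized ListBlobsFlatSegmentResponse (or the raw byte iterator when
+        # `stream=True` was passed) is returned as-is below.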
+ return deserialized # type: ignore + + @distributed_trace_async + async def list_blob_hierarchy_segment( + self, + container_name: str, + *, + delimiter: str, + version: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword delimiter: When the request includes this parameter, the operation returns a + BlobPrefix element in the response body that acts as a placeholder for all blobs whose names + begin with the same substring up to the appearance of the delimiter character. The delimiter + may be a single character or a string. Required. + :paramtype delimiter: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :paramtype prefix: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or ~azure.storage.blob.models.ListBlobsIncludes] + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: ListBlobsHierarchySegmentResponse. 
The ListBlobsHierarchySegmentResponse is compatible + with MutableMapping + :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_container_list_blob_hierarchy_segment_request( + container_name=container_name, + delimiter=delimiter, + version=version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListBlobsHierarchySegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, container_name: str, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_get_account_info_request( + container_name=container_name, + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.BlobClient`'s + :attr:`blob` attribute. 
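+
+        A minimal access sketch (illustrative only, not generator output; assumes an
+        authenticated ``azure.storage.blob.aio.BlobClient`` named ``client`` and
+        placeholder version strings)::
+
+            data = await client.blob.download(
+                "my-container",
+                "my-blob",
+                version_id="2024-01-01T00:00:00.0000000Z",  # placeholder version id
+                version="2021-12-02",                       # placeholder service version
+            )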
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def download( + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_content_md5: Optional[bool] = None, + range_content_crc64: Optional[bool] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword range_content_md5: Optional. When this header is set to true and specified together + with the Range header, the service returns the MD5 hash for the range, as long as the range is + less than or equal to 4 MB in size. Default value is None. + :paramtype range_content_md5: bool + :keyword range_content_crc64: Optional. When this header is set to true and specified together + with the Range header, the service returns the CRC64 hash for the range, as long as the range + is less than or equal to 4 MB in size. Default value is None. + :paramtype range_content_crc64: bool + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. 
Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[bytes] = kwargs.pop("cls", None) + + _request = build_blob_download_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=lease_id, + range_content_md5=range_content_md5, + range_content_crc64=range_content_crc64, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_tags=if_tags, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", 
response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_properties( + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. 
Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_get_properties_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + snapshot=snapshot, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + 
response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_delete_type: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + permanently removed from the storage account. If the storage account's soft delete feature is + enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + immediately. However, the blob service retains the blob or snapshot for the number of days + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the \\"include=deleted\\" query parameter to discover which blobs and snapshots have + been soft deleted. 
+    @distributed_trace_async
+    async def delete(  # pylint: disable=inconsistent-return-statements
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        version_id: str,
+        version: str,
+        snapshot: Optional[str] = None,
+        timeout: Optional[int] = None,
+        lease_id: Optional[str] = None,
+        delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        if_tags: Optional[str] = None,
+        blob_delete_type: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """If the storage account's soft delete feature is disabled then, when a blob is deleted,
+        it is permanently removed from the storage account. If the storage account's soft delete
+        feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes
+        inaccessible immediately. However, the blob service retains the blob or snapshot for the
+        number of days specified by the DeleteRetentionPolicy section of
+        [Storage service properties](Set-Blob-Service-Properties.md). After the specified number of
+        days has passed, the blob's data is permanently removed from the storage account. Note that
+        you continue to be charged for the soft-deleted blob's storage until it is permanently
+        removed. Use the List Blobs API and specify the \\"include=deleted\\" query parameter to
+        discover which blobs and snapshots have been soft deleted. You can then use the Undelete
+        Blob API to restore a soft-deleted blob. All other operations on a soft-deleted blob or
+        snapshot cause the service to return an HTTP status code of 404 (ResourceNotFound).
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword version_id: The version id parameter is an opaque DateTime value that, when present,
+        specifies the version of the blob to operate on. It's for service version 2019-10-10 and
+        newer. Required.
+        :paramtype version_id: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+        specifies the blob snapshot to retrieve. For more information on working with blob
+        snapshots, see Creating a Snapshot of a Blob. Default value is None.
+        :paramtype snapshot: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is
+        active and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword delete_snapshots: Required if the blob has associated snapshots. Specify one of the
+        following two options: include: Delete the base blob and all of its snapshots. only: Delete
+        only the blob's snapshots and not the blob itself. Known values are: "none" and "include".
+        Default value is None.
+        :paramtype delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType
+        :keyword if_modified_since: A date-time value. A request is made under the condition that
+        the resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+        the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword blob_delete_type: Optional. Only possible value is 'permanent', which specifies to
+        permanently delete a blob if blob soft delete is enabled. Known values are: "none" and
+        "include". Default value is None.
+        :paramtype blob_delete_type: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+        is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_delete_request(
+            container_name=container_name,
+            blob=blob,
+            version_id=version_id,
+            version=version,
+            snapshot=snapshot,
+            timeout=timeout,
+            lease_id=lease_id,
+            delete_snapshots=delete_snapshots,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            if_tags=if_tags,
+            blob_delete_type=blob_delete_type,
+            etag=etag,
+            match_condition=match_condition,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def undelete(  # pylint: disable=inconsistent-return-statements
+        self, container_name: str, blob: str, *, version: str, **kwargs: Any
+    ) -> None:
+        """Undelete a blob that was previously soft deleted.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_undelete_request(
+            container_name=container_name,
+            blob=blob,
+            version=version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
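+    # Editorial aside -- hedged round-trip sketch, not generated code: with soft
+    # delete enabled on the account, `delete` marks the blob for deletion and
+    # `undelete` restores it within the retention window. Operation-group name
+    # and `version` value are assumptions.
+    #
+    #     await client.blob.delete(
+    #         "my-container", "my-blob",
+    #         version_id="...", version="2021-12-02",
+    #     )
+    #     # While soft-deleted, normal reads fail with 404 (ResourceNotFound).
+    #     await client.blob.undelete("my-container", "my-blob", version="2021-12-02")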
+    @distributed_trace_async
+    async def set_expiry(  # pylint: disable=inconsistent-return-statements
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        expiry_options: Union[str, _models.BlobExpiryOptions],
+        expires_on: str,
+        version: str,
+        **kwargs: Any
+    ) -> None:
+        """Sets the expiration time of a blob.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword expiry_options: Indicates the mode of the expiry time. Known values are:
+        "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required.
+        :paramtype expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions
+        :keyword expires_on: The time to set the blob to expire. Required.
+        :paramtype expires_on: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_set_expiry_request(
+            container_name=container_name,
+            blob=blob,
+            expiry_options=expiry_options,
+            expires_on=expires_on,
+            version=version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
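+    # Editorial aside -- hedged sketch, not generated code: `expiry_options`
+    # selects how the service interprets `expires_on` (per the Set Blob Expiry
+    # REST docs, "RelativeToNow" takes a duration in milliseconds and "Absolute"
+    # an RFC 1123 date). Client wiring and `version` value are assumptions.
+    #
+    #     # Expire the blob eight hours from now (8 h * 3600 s * 1000 ms).
+    #     await client.blob.set_expiry(
+    #         "my-container", "my-blob",
+    #         expiry_options="RelativeToNow",
+    #         expires_on=str(8 * 3600 * 1000),
+    #         version="2021-12-02",
+    #     )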
+    @distributed_trace_async
+    async def set_http_headers(  # pylint: disable=inconsistent-return-statements
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        blob_cache_control: Optional[str] = None,
+        blob_content_type: Optional[str] = None,
+        blob_content_md5: Optional[str] = None,
+        blob_content_encoding: Optional[str] = None,
+        blob_content_language: Optional[str] = None,
+        lease_id: Optional[str] = None,
+        blob_content_disposition: Optional[str] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        if_tags: Optional[str] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """The Set HTTP Headers operation sets system properties on the blob.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this
+        property is stored with the blob and returned with a read request. Default value is None.
+        :paramtype blob_cache_control: str
+        :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this
+        property is stored with the blob and returned with a read request. Default value is None.
+        :paramtype blob_content_type: str
+        :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash
+        is not validated, as the hashes for the individual blocks were validated when each was
+        uploaded. Default value is None.
+        :paramtype blob_content_md5: str
+        :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified,
+        this property is stored with the blob and returned with a read request. Default value is
+        None.
+        :paramtype blob_content_encoding: str
+        :keyword blob_content_language: Optional. Sets the blob's content language. If specified,
+        this property is stored with the blob and returned with a read request. Default value is
+        None.
+        :paramtype blob_content_language: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is
+        active and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If
+        specified, this property is stored with the blob and returned with a read request. Default
+        value is None.
+        :paramtype blob_content_disposition: str
+        :keyword if_modified_since: A date-time value. A request is made under the condition that
+        the resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+        the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+        is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_set_http_headers_request(
+            container_name=container_name,
+            blob=blob,
+            version=version,
+            timeout=timeout,
+            blob_cache_control=blob_cache_control,
+            blob_content_type=blob_content_type,
+            blob_content_md5=blob_content_md5,
+            blob_content_encoding=blob_content_encoding,
+            blob_content_language=blob_content_language,
+            lease_id=lease_id,
+            blob_content_disposition=blob_content_disposition,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            if_tags=if_tags,
+            etag=etag,
+            match_condition=match_condition,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def set_immutability_policy(  # pylint: disable=inconsistent-return-statements
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        immutability_policy_expiry: Optional[str] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        **kwargs: Any
+    ) -> None:
+        """Sets the immutability policy of a blob.
+
+        :param container_name: The name of the container.
Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_immutability_policy_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + if_unmodified_since=if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", 
response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements + self, container_name: str, blob: str, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_delete_immutability_policy_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def set_legal_hold( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + legal_hold: bool, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """The Set Legal Hold operation sets a legal hold on the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword legal_hold: Required. Specifies the legal hold status to set on the blob. Required. + :paramtype legal_hold: bool + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_legal_hold_request( + container_name=container_name, + blob=blob, + legal_hold=legal_hold, + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Set Metadata operation sets user-defined metadata for the specified blob as one or more + name-value pairs. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_metadata_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: 
Optional[str] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """[Update] The Lease Blob operation establishes and manages a lock on a blob for write
+        and delete operations.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword duration: Specifies the duration of the lease, in seconds, or negative one (-1)
+        for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A
+        lease duration cannot be changed using renew or change. Required.
+        :paramtype duration: int
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword proposed_lease_id: Optional. The proposed lease ID for the blob. Default value
+        is None.
+        :paramtype proposed_lease_id: str
+        :keyword if_modified_since: A date-time value. A request is made under the condition that
+        the resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+        the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+        is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_acquire_lease_request(
+            container_name=container_name,
+            blob=blob,
+            duration=duration,
+            version=version,
+            timeout=timeout,
+            proposed_lease_id=proposed_lease_id,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            if_tags=if_tags,
+            etag=etag,
+            match_condition=match_condition,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
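+    # Editorial aside -- hedged lease-lifecycle sketch, not generated code: the
+    # service returns the acquired lease ID in the `x-ms-lease-id` response
+    # header, surfaced here through the `cls` callback. Operation-group name and
+    # `version` value are assumptions.
+    #
+    #     def grab_lease_id(pipeline_response, deserialized, headers):
+    #         return headers["x-ms-lease-id"]
+    #
+    #     lease_id = await client.blob.acquire_lease(
+    #         "my-container", "my-blob",
+    #         duration=-1,  # infinite lease; finite leases are 15-60 seconds
+    #         version="2021-12-02",
+    #         cls=grab_lease_id,
+    #     )
+    #     try:
+    #         ...  # writes guarded by the lease
+    #     finally:
+    #         await client.blob.release_lease(
+    #             "my-container", "my-blob", lease_id=lease_id, version="2021-12-02",
+    #         )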
+    @distributed_trace_async
+    async def release_lease(  # pylint: disable=inconsistent-return-statements
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        lease_id: str,
+        version: str,
+        timeout: Optional[int] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        if_tags: Optional[str] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """[Update] The Lease Blob operation establishes and manages a lock on a blob for write
+        and delete operations.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword lease_id: The lease ID to release. The blob must have an active lease and the
+        lease ID must match. Required.
+        :paramtype lease_id: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword if_modified_since: A date-time value. A request is made under the condition that
+        the resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+        the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+        is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_blob_release_lease_request(
+            container_name=container_name,
+            blob=blob,
+            lease_id=lease_id,
+            version=version,
+            timeout=timeout,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            if_tags=if_tags,
+            etag=etag,
+            match_condition=match_condition,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-version"] = 
self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword lease_id: Required. A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_renew_lease_request( + container_name=container_name, + blob=blob, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword lease_id: Required. 
A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword proposed_lease_id: Optional. The proposed lease ID for the container. Default value + is None. + :paramtype proposed_lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_change_lease_request( + container_name=container_name, + blob=blob, + lease_id=lease_id, + version=version, + timeout=timeout, + proposed_lease_id=proposed_lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword break_period: For a break operation, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. This break period is only used if it is + shorter than the time remaining on the lease. If longer, the time remaining on the lease is + used. A new lease will not be available before the break period has expired, but the lease may + be held for longer than the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease period elapses, and an + infinite lease breaks immediately. Default value is None. + :paramtype break_period: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
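+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ A minimal, hypothetical call; ``ops`` is assumed to be an instance of this
+ operations class and the ``version`` string is a placeholder, neither of which
+ is defined in this diff::
+
+ # Ask the service to end the lease after at most 10 seconds.
+ await ops.break_lease(
+ "my-container",
+ "my-blob",
+ version="<service-version>",
+ break_period=10,
+ )
+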
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_break_lease_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + break_period=break_period, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def create_snapshot( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param container_name: The name of the container. Required. 
+ :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
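+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ A hypothetical snapshot call using a customer-provided key. Base64-encoding
+ the key and its SHA256 digest follows the service's customer-provided-key
+ header convention; ``ops`` and the ``version`` placeholder are assumptions::
+
+ import base64, hashlib, os
+
+ key = os.urandom(32)  # illustrative 256-bit key
+ await ops.create_snapshot(
+ "my-container",
+ "my-blob",
+ version="<service-version>",
+ encryption_key=base64.b64encode(key).decode(),
+ encryption_key_sha256=base64.b64encode(hashlib.sha256(key).digest()).decode(),
+ encryption_algorithm="AES256",
+ )
+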
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_create_snapshot_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def start_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + 
source_if_none_match: Optional[str] = None,
+ source_if_tags: Optional[str] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ if_tags: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ blob_tags_string: Optional[str] = None,
+ seal_blob: Optional[bool] = None,
+ immutability_policy_expiry: Optional[str] = None,
+ immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ legal_hold: Optional[bool] = None,
+ etag: Optional[str] = None,
+ match_condition: Optional[MatchConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ """The Start Copy From URL operation copies a blob or an internet resource to a new blob.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL
+ of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as
+ it would appear in a request URI. The source blob must either be public or must be
+ authenticated via a shared access signature. Required.
+ :paramtype copy_source: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+ "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+ value is None.
+ :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword rehydrate_priority: Optional: Indicates the priority with which to rehydrate an
+ archived blob. Known values are: "High" and "Standard". Default value is None.
+ :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+ has not been modified since the specified date/time. Default value is None.
+ :paramtype source_if_unmodified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with
+ a matching value. Default value is None.
+ :paramtype source_if_tags: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
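+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ Since the method returns ``None``, a ``cls`` callback (see the implementation
+ below) is one way to observe ``x-ms-copy-id``/``x-ms-copy-status``; ``ops``,
+ the source URL, and the ``version`` placeholder are assumptions::
+
+ captured = {}
+ await ops.start_copy_from_url(
+ "my-container",
+ "dest-blob",
+ copy_source="https://account.blob.core.windows.net/src/source-blob",
+ version="<service-version>",
+ cls=lambda resp, _, headers: captured.update(headers),
+ )
+ copy_id = captured.get("x-ms-copy-id")  # usable later with abort_copy_from_url
+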
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. Default value is None. + :paramtype seal_blob: bool + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_start_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + source_if_tags=source_if_tags, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL + of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as + it would appear in a request URI. The source blob must either be public or must be + authenticated via a shared access signature. Required. + :paramtype copy_source: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None.
+ :paramtype timeout: int
+ :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+ "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+ value is None.
+ :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+ has not been modified since the specified date/time. Default value is None.
+ :paramtype source_if_unmodified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
+ :paramtype if_tags: str
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :paramtype source_content_md5: str
+ :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+ value is None.
+ :paramtype blob_tags_string: str
+ :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+ is set to expire. Default value is None.
+ :paramtype immutability_policy_expiry: str
+ :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+ Known values are: "Mutable", "Locked", and "Unlocked". Default value is None.
+ :paramtype immutability_policy_mode: str or
+ ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is
+ None.
+ :paramtype legal_hold: bool
+ :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+ valid OAuth access token to copy source. Default value is None.
+ :paramtype copy_source_authorization: str
+ :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+ scope to use to encrypt the data provided in the request. If not specified, the request will be
+ encrypted with the root account key. Default value is None.
+ :paramtype encryption_scope: str
+ :keyword copy_source_tags: Optional, default 'replace'. Indicates if source tags should be
+ copied or replaced with the tags specified by x-ms-tags. Default value is None.
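+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ A hypothetical synchronous copy that authorizes the read of a private source
+ with an OAuth bearer token; ``ops``, ``token``, and the ``version`` placeholder
+ are assumptions::
+
+ await ops.copy_from_url(
+ "my-container",
+ "dest-blob",
+ copy_source="https://account.blob.core.windows.net/src/source-blob",
+ version="<service-version>",
+ copy_source_authorization=f"Bearer {token}",  # token acquisition not shown
+ )
+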
+ :paramtype copy_source_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + tier=tier, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=encryption_scope, + copy_source_tags=copy_source_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") 
+ ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. Required. + :paramtype copy_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_abort_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_id=copy_id, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return 
cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def set_tier( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ blob: str,
+ *,
+ access_tier: Union[str, _models.AccessTier],
+ version: str,
+ timeout: Optional[int] = None,
+ rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None,
+ lease_id: Optional[str] = None,
+ if_tags: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob
+ or block blob, but not on an append blob. A block blob's tier determines Hot/Cool/Archive
+ storage type. This operation does not update the blob's ETag.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword access_tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6",
+ "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive".
+ Required.
+ :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword rehydrate_priority: Optional: Indicates the priority with which to rehydrate an
+ archived blob. Known values are: "High" and "Standard". Default value is None.
+ :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
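+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ Archiving a blob, then requesting a high-priority rehydrate back to an online
+ tier; ``ops`` and the ``version`` placeholder are assumptions::
+
+ await ops.set_tier("my-container", "my-blob", access_tier="Archive", version="<service-version>")
+ # Later: rehydration starts when the tier is set back to an online tier.
+ await ops.set_tier(
+ "my-container",
+ "my-blob",
+ access_tier="Hot",
+ version="<service-version>",
+ rehydrate_priority="High",
+ )
+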
+ :paramtype if_tags: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_tier_request( + container_name=container_name, + blob=blob, + access_tier=access_tier, + version=version, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, container_name: str, blob: str, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. 
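+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ The SKU name and account kind come back as response headers, so a ``cls``
+ callback is one way to surface them; ``ops`` and the ``version`` placeholder
+ are assumptions::
+
+ info = {}
+ await ops.get_account_info(
+ "my-container",
+ "my-blob",
+ version="<service-version>",
+ cls=lambda resp, _, headers: info.update(headers),
+ )
+ sku, kind = info.get("x-ms-sku-name"), info.get("x-ms-account-kind")
+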
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_get_account_info_request( + container_name=container_name, + blob=blob, + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @overload + async def query( + self, + container_name: str, + blob: str, + query_request: _models.QueryRequest, + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. + :type query_request: ~azure.storage.blob.models.QueryRequest + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. 
Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def query( + self, + container_name: str, + blob: str, + query_request: JSON, + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. 
+ :type query_request: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
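+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ A hypothetical JSON payload for ``query_request``; the field names are an
+ assumption about the QueryRequest model's wire shape, which this diff does not
+ show, and ``ops``/``version`` are placeholders::
+
+ payload = {
+ "queryType": "SQL",  # assumed field name
+ "expression": "SELECT * FROM BlobStorage WHERE _1 > 100",  # assumed field name
+ }
+ data = await ops.query(
+ "my-container",
+ "data.csv",
+ payload,
+ version="<service-version>",
+ )
+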
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def query( + self, + container_name: str, + blob: str, + query_request: IO[bytes], + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. + :type query_request: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
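+
+ .. admonition:: Example (editorial sketch, not generated code)
+
+ The implementation below honors a ``stream=True`` keyword, returning an async
+ byte iterator instead of a fully buffered body; ``ops``, ``payload``, and the
+ ``version`` placeholder are assumptions::
+
+ result = await ops.query(
+ "my-container",
+ "data.csv",
+ payload,
+ version="<service-version>",
+ stream=True,
+ )
+ async for chunk in result:
+ process(chunk)  # hypothetical consumer
+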
+ :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def query( + self, + container_name: str, + blob: str, + query_request: Union[_models.QueryRequest, JSON, IO[bytes]], + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Is one of the following types: QueryRequest, JSON, + IO[bytes] Required. + :type query_request: ~azure.storage.blob.models.QueryRequest or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[bytes] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(query_request, (IOBase, bytes)): + _content = query_request + else: + _content = json.dumps(query_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_query_request( + container_name=container_name, + blob=blob, + version=version, + snapshot=snapshot, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "str", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_tags( + self, + container_name: str, + blob: str, + *, + version_id: str, + 
version: str, + timeout: Optional[int] = None, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> _models.BlobTags: + """The Get Blob Tags operation enables users to get tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :return: BlobTags. The BlobTags is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.BlobTags + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + + _request = build_blob_get_tags_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + timeout=timeout, + snapshot=snapshot, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
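+        # x-ms-request-id and x-ms-client-request-id are correlation identifiers; they
+        # let a single call be matched against service-side diagnostics when troubleshooting.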
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BlobTags, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: _models.BlobTags, + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Required. + :type tags: ~azure.storage.blob.models.BlobTags + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: JSON, + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. 
+ :type blob: str + :param tags: The blob tags. Required. + :type tags: JSON + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: IO[bytes], + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Required. + :type tags: IO[bytes] + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. 
Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: Union[_models.BlobTags, JSON, IO[bytes]], + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Is one of the following types: BlobTags, JSON, IO[bytes] Required. + :type tags: ~azure.storage.blob.models.BlobTags or JSON or IO[bytes] + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
+ :paramtype if_tags: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(tags, (IOBase, bytes)): + _content = tags + else: + _content = json.dumps(tags, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_set_tags_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + timeout=timeout, + lease_id=lease_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + if_tags=if_tags, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.BlobClient`'s + :attr:`page_blob` attribute. 
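+
+    A minimal usage sketch (illustrative only; the container, blob, and service
+    version values are placeholders, and client construction is elided)::
+
+        page_blob = blob_client.page_blob
+        await page_blob.create(
+            container_name="my-container",
+            blob="my-page-blob",
+            content_length=0,                 # Create carries no request body
+            blob_content_length=512 * 1024,   # maximum blob size; must be 512-byte aligned
+            version="<service-version>",
+        )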
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :paramtype blob_content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4", + "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. + :paramtype tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. 
Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_sequence_number: Optional. The sequence number is a user-controlled property + that you can use to track requests. The value of the sequence number must be between 0 and 2^63 + - 1. The default value is 0. Default value is None. + :paramtype blob_sequence_number: int + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. 
+ :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_create_request( + container_name=container_name, + blob=blob, + content_length=content_length, + blob_content_length=blob_content_length, + version=version, + timeout=timeout, + tier=tier, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_sequence_number=blob_sequence_number, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", 
response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def upload_pages( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. 
If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. Default value is None. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. Default value is None. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. Default value is None. + :paramtype if_sequence_number_equal_to: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_page_blob_upload_pages_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + 
"str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def clear_pages( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Clear Pages operation clears a range of pages from a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. Default value is None. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. Default value is None. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. Default value is None. + :paramtype if_sequence_number_equal_to: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
+ :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_clear_pages_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + timeout=timeout, + range=range, + lease_id=lease_id, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def upload_pages_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + source_url: str, + source_range: str, + range: str, + version: str, + timeout: Optional[int] = None, + source_content_md5: 
Optional[str] = None,
+        source_content_crc64: Optional[str] = None,
+        encryption_key: Optional[str] = None,
+        encryption_key_sha256: Optional[str] = None,
+        encryption_algorithm: Optional[str] = None,
+        encryption_scope: Optional[str] = None,
+        lease_id: Optional[str] = None,
+        if_sequence_number_less_than_or_equal_to: Optional[int] = None,
+        if_sequence_number_less_than: Optional[int] = None,
+        if_sequence_number_equal_to: Optional[int] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        if_tags: Optional[str] = None,
+        source_if_modified_since: Optional[datetime.datetime] = None,
+        source_if_unmodified_since: Optional[datetime.datetime] = None,
+        source_if_match: Optional[str] = None,
+        source_if_none_match: Optional[str] = None,
+        copy_source_authorization: Optional[str] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """The Upload Pages operation writes a range of pages to a page blob where the contents are read
+        from a URL.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword content_length: The length of the request. Required.
+        :paramtype content_length: int
+        :keyword source_url: Specify a URL to the copy source. Required.
+        :paramtype source_url: str
+        :keyword source_range: Bytes of source data in the specified range. The length of this range
+        should match the ContentLength header and x-ms-range/Range destination range header. Required.
+        :paramtype source_range: str
+        :keyword range: The range of bytes to which the source range would be written. The range
+        should be 512-byte aligned and range-end is required. Required.
+        :paramtype range: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be
+        read from the copy source. Default value is None.
+        :paramtype source_content_md5: str
+        :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be
+        read from the copy source. Default value is None.
+        :paramtype source_content_crc64: str
+        :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key
+        to use to encrypt the data provided in the request. If not specified, the request will be
+        encrypted with the root account key. Default value is None.
+        :paramtype encryption_key: str
+        :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256
+        hash of the encryption key used to encrypt the data provided in the request. This header is
+        only used for encryption with a customer-provided key. If the request is authenticated with a
+        client token, this header should be specified using the SHA256 hash of the encryption key.
+        Default value is None.
+        :paramtype encryption_key_sha256: str
+        :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the
+        algorithm to use for encryption. If not specified, the default is AES256. Default value is
+        None.
+        :paramtype encryption_algorithm: str
+        :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+        scope to use to encrypt the data provided in the request. If not specified, the request will be
+        encrypted with the root account key. Default value is None.
+        :paramtype encryption_scope: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+        and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on
+        a blob if it has a sequence number less than or equal to the specified. Default value is None.
+        :paramtype if_sequence_number_less_than_or_equal_to: int
+        :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if
+        it has a sequence number less than the specified. Default value is None.
+        :paramtype if_sequence_number_less_than: int
+        :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it
+        has the specified sequence number. Default value is None.
+        :paramtype if_sequence_number_equal_to: int
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+        resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+        resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+        has been modified since the specified date/time. Default value is None.
+        :paramtype source_if_modified_since: ~datetime.datetime
+        :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+        has not been modified since the specified date/time. Default value is None.
+        :paramtype source_if_unmodified_since: ~datetime.datetime
+        :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+        Default value is None.
+        :paramtype source_if_match: str
+        :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+        matching value. Default value is None.
+        :paramtype source_if_none_match: str
+        :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+        valid OAuth access token to copy source. Default value is None.
+        :paramtype copy_source_authorization: str
+        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+        None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_upload_pages_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + source_url=source_url, + source_range=source_range, + range=range, + version=version, + timeout=timeout, + source_content_md5=source_content_md5, + source_content_crc64=source_content_crc64, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + lease_id=lease_id, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + copy_source_authorization=copy_source_authorization, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( 
+ "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_page_ranges( + self, + container_name: str, + blob: str, + *, + version: str, + snapshot: Optional[str] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.PageList: + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. 
+ :paramtype maxresults: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: PageList. The PageList is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.PageList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + + _request = build_page_blob_get_page_ranges_request( + container_name=container_name, + blob=blob, + version=version, + snapshot=snapshot, + range=range, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + marker=marker, + maxresults=maxresults, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PageList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_page_ranges_diff( 
+        self,
+        container_name: str,
+        blob: str,
+        *,
+        prevsnapshot: str,
+        prev_snapshot_url: str,
+        version: str,
+        snapshot: Optional[str] = None,
+        timeout: Optional[int] = None,
+        range: Optional[str] = None,
+        lease_id: Optional[str] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        if_tags: Optional[str] = None,
+        marker: Optional[str] = None,
+        maxresults: Optional[int] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> _models.PageList:
+        """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob
+        or snapshot of a page blob that were changed between a target blob and a previous snapshot.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword prevsnapshot: Supported in version 2015-07-08 and newer. The prevsnapshot parameter
+         is a DateTime value that specifies that the response will contain only pages that were
+         changed between the target blob and the previous snapshot. Changed pages include both
+         updated and cleared pages. The target blob may be a snapshot, as long as the snapshot
+         specified by prevsnapshot is the older of the two. Note that incremental snapshots are
+         currently supported only for blobs created on or after January 1, 2016. Required.
+        :paramtype prevsnapshot: str
+        :keyword prev_snapshot_url: This header is only supported in service versions 2019-04-19 and
+         after and specifies the URL of a previous snapshot of the target blob. The response will
+         only contain pages that were changed between the target blob and its previous snapshot.
+         Required.
+        :paramtype prev_snapshot_url: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+         specifies the blob snapshot to retrieve. For more information on working with blob
+         snapshots, see "Creating a Snapshot of a Blob". Default value is None.
+        :paramtype snapshot: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         "Setting Timeouts for Blob Service Operations". Default value is None.
+        :paramtype timeout: int
+        :keyword range: Return only the bytes of the blob in the specified range. Default value is
+         None.
+        :paramtype range: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is
+         active and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+         resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+         the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+         matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword marker: A string value that identifies the portion of the list of page ranges to be
+         returned with the next listing operation. The operation returns the NextMarker value within
+         the response body if the listing operation did not return all page ranges remaining to be
+         listed with the current page. The NextMarker value can be used as the value for the marker
+         parameter in a subsequent call to request the next page of list items. The marker value is
+         opaque to the client. Default value is None.
+        :paramtype marker: str
+        :keyword maxresults: Specifies the maximum number of page ranges to return. If the request
+         does not specify maxresults, or specifies a value greater than 5000, the server will return
+         up to 5000 items. Default value is None.
+        :paramtype maxresults: int
+        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+         None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: PageList. The PageList is compatible with MutableMapping
+        :rtype: ~azure.storage.blob.models.PageList
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_models.PageList] = kwargs.pop("cls", None)
+
+        _request = build_page_blob_get_page_ranges_diff_request(
+            container_name=container_name,
+            blob=blob,
+            prevsnapshot=prevsnapshot,
+            prev_snapshot_url=prev_snapshot_url,
+            version=version,
+            snapshot=snapshot,
+            timeout=timeout,
+            range=range,
+            lease_id=lease_id,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            if_tags=if_tags,
+            marker=marker,
+            maxresults=maxresults,
+            etag=etag,
+            match_condition=match_condition,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["x-ms-blob-content-length"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-content-length")
+        )
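        # Editor's note: an illustrative sketch (not generated code) of how a caller
        # might drive the marker/NextMarker pair documented above to page through all
        # ranges. The `ops` name and the API version string are assumptions, and the
        # PageList model is assumed to expose `page_range` and `next_marker`
        # attributes matching the service schema:
        #
        #     marker = None
        #     while True:
        #         page_list = await ops.get_page_ranges(
        #             "my-container", "my-blob",
        #             version="2021-12-02", marker=marker, maxresults=1000,
        #         )
        #         for r in page_list.page_range or []:
        #             print(r.start, r.end)
        #         marker = page_list.next_marker
        #         if not marker:
        #             break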
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PageList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def resize( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Resize operation increases the size of the page blob to the specified size. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :paramtype blob_content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. 
+ :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_resize_request( + container_name=container_name, + blob=blob, + blob_content_length=blob_content_length, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = 
self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def update_sequence_number( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + sequence_number_action: Union[str, _models.SequenceNumberActionType], + blob_sequence_number: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Update Sequence Number operation sets the blob's sequence number. The operation will fail + if the specified sequence number is less than the current sequence number of the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword sequence_number_action: Required if the x-ms-blob-sequence-number header is set for + the request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. Known values are: "increment", "max", and "update". + Required. + :paramtype sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :keyword blob_sequence_number: Set for page blobs only. The sequence number is a + user-controlled value that you can use to track requests. The value of the sequence number must + be between 0 and 2^63 - 1. Required. + :paramtype blob_sequence_number: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_update_sequence_number_request( + container_name=container_name, + blob=blob, + sequence_number_action=sequence_number_action, + blob_sequence_number=blob_sequence_number, + version=version, + timeout=timeout, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def copy_incremental( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Copy Incremental operation copies a snapshot of the source page blob to a destination page + blob. 
The snapshot is copied such that only the differential changes between the previously
+        copied snapshot and the new snapshot are transferred to the destination. The copied snapshots
+        are complete copies of the original snapshot and can be read or copied from as usual. This
+        API is supported since REST version 2016-05-31.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a
+         URL of up to 2 KB in length that specifies a page blob snapshot. The value should be
+         URL-encoded as it would appear in a request URI. The source blob must either be public or
+         must be authenticated via a shared access signature. Required.
+        :paramtype copy_source: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         "Setting Timeouts for Blob Service Operations". Default value is None.
+        :paramtype timeout: int
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+         resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+         the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+         matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is
+         active and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+         None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_copy_incremental_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.BlobClient`'s + :attr:`append_blob` attribute. 
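    Example (editorial sketch, not part of the generated code; assumes an
    authenticated async ``BlobClient`` named ``client`` and an API version string of
    your choosing)::

        ops = client.append_blob  # access the operation group; never construct it
        await ops.create("logs", "app.log", content_length=0, version="2021-12-02")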
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new append blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. 
+ :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_create_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + timeout=timeout, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
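        # Editor's note (illustrative sketch, not generated code): Create provisions
        # an empty append blob; data is added afterwards with append_block. A
        # hypothetical call sequence, assuming `ops` is this AppendBlobOperations
        # instance and the version string is a placeholder:
        #
        #     await ops.create("my-container", "my-log", content_length=0,
        #                      version="2021-12-02")
        #     await ops.append_block("my-container", "my-log", b"first line\n",
        #                            content_length=11, max_size=1024 * 1024,
        #                            append_position=0, version="2021-12-02")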
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def append_block( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + content_length: int, + max_size: int, + append_position: int, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an append blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. 
If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_append_blob_append_block_request( + container_name=container_name, + blob=blob, + content_length=content_length, + max_size=max_size, + append_position=append_position, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "int", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", 
response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def append_block_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + source_url: str, + source_range: str, + max_size: int, + append_position: int, + version: str, + timeout: Optional[int] = None, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Append Block From URL operation creates a new block to be committed as part of an append + blob where the contents are read from a URL. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword source_url: Specify a URL to the copy source. Required. + :paramtype source_url: str + :keyword source_range: Bytes of source data in the specified range. Required. + :paramtype source_range: str + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be + read from the copy source. Default value is None. 
+ :paramtype source_content_md5: str + :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a + valid OAuth access token to copy source. Default value is None. + :paramtype copy_source_authorization: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_append_block_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + source_url=source_url, + source_range=source_range, + max_size=max_size, + append_position=append_position, + version=version, + timeout=timeout, + source_content_md5=source_content_md5, + source_content_crc64=source_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + copy_source_authorization=copy_source_authorization, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "int", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
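        # Editor's note (illustrative sketch, not generated code): the service echoes
        # where the block landed in x-ms-blob-append-offset; single-writer callers
        # doing conditional appends feed that value back as append_position on the
        # next call so a duplicate or out-of-order request fails with 412 rather than
        # appending twice. Hypothetical continuation, assuming `ops`, `src_url`, and
        # `next_offset` are defined by the caller:
        #
        #     await ops.append_block_from_url(
        #         "my-container", "my-blob",
        #         content_length=0, source_url=src_url, source_range="bytes=0-4095",
        #         max_size=4 * 1024 * 1024, append_position=next_offset,
        #         version="2021-12-02",
        #     )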
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def seal( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + append_position: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 version or later. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_seal_request( + container_name=container_name, + blob=blob, + append_position=append_position, + version=version, + timeout=timeout, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.BlobClient`'s + :attr:`block_blob` attribute. 
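+
+    A minimal usage sketch (illustrative only: the container/blob names and
+    service version are placeholders, and ``client`` is assumed to be an
+    already-authenticated ``azure.storage.blob.aio.BlobClient``)::
+
+        await client.block_blob.upload(
+            container_name="my-container",
+            blob="my-blob",
+            body=b"hello, world",
+            version="2021-12-02",
+        )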
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def upload( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. 
If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. 
+ :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_block_blob_upload_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + 
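            # Non-201 means the service rejected the upload: map the status
+            # code to a typed azure-core exception and raise it with the
+            # deserialized StorageError payload attached for diagnostics.
+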
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def put_blob_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + copy_source: str, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. 
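+
+        A hedged example call (all argument values below are placeholders; a
+        real call would go through ``client.block_blob`` on an authenticated
+        ``azure.storage.blob.aio.BlobClient``)::
+
+            await client.block_blob.put_blob_from_url(
+                container_name="my-container",
+                blob="my-blob",
+                content_length=0,
+                copy_source="https://myaccount.blob.core.windows.net/src/blob?<sas>",
+                version="2021-12-02",
+            )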
+ + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL + of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as + it would appear in a request URI. The source blob must either be public or must be + authenticated via a shared access signature. Required. + :paramtype copy_source: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. 
Version 2019-07-07 and later. Specifies the
+         algorithm to use for encryption. If not specified, the default is AES256. Default value is
+         None.
+        :paramtype encryption_algorithm: str
+        :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+         scope to use to encrypt the data provided in the request. If not specified, the request will
+         be encrypted with the root account key. Default value is None.
+        :paramtype encryption_scope: str
+        :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+         "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+         value is None.
+        :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+         resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that
+         the resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+         matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+         has been modified since the specified date/time. Default value is None.
+        :paramtype source_if_modified_since: ~datetime.datetime
+        :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching
+         value. Default value is None.
+        :paramtype source_if_match: str
+        :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+         matching value. Default value is None.
+        :paramtype source_if_none_match: str
+        :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with
+         a matching value. Default value is None.
+        :paramtype source_if_tags: str
+        :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be
+         read from the copy source. Default value is None.
+        :paramtype source_content_md5: str
+        :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+         value is None.
+        :paramtype blob_tags_string: str
+        :keyword copy_source_blob_properties: Optional, default is true. Indicates if properties from
+         the source blob should be copied. Default value is None.
+        :paramtype copy_source_blob_properties: bool
+        :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+         valid OAuth access token to copy source. Default value is None.
+        :paramtype copy_source_authorization: str
+        :keyword copy_source_tags: Optional, default 'replace'. Indicates if source tags should be
+         copied or replaced with the tags specified by x-ms-tags. Default value is None.
+        :paramtype copy_source_tags: str
+        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+         None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_block_blob_put_blob_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + copy_source=copy_source, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + source_if_modified_since=source_if_modified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + source_if_tags=source_if_tags, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + 
) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def stage_block( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + block_id: str, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + timeout: Optional[int] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword block_id: A valid Base64 string value that identifies the block. Prior to encoding, + the string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. Required. + :paramtype block_id: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. 
If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_block_blob_stage_block_request( + container_name=container_name, + blob=blob, + block_id=block_id, + content_length=content_length, + version=version, + transactional_content_md5=transactional_content_md5, + timeout=timeout, + transactional_content_crc64=transactional_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + 
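    # Hedged sketch of the block-blob workflow these operations implement
+    # (illustrative only: names, the block id, and the service version are
+    # placeholders; `client` is assumed to be an authenticated
+    # azure.storage.blob.aio.BlobClient, and the {"latest": [block_id]} body
+    # follows the BlockLookupList shape accepted by commit_block_list):
+    #
+    #     import base64
+    #     block_id = base64.b64encode(b"block-0000").decode()
+    #     await client.block_blob.stage_block(
+    #         "my-container", "my-blob", b"chunk bytes",
+    #         block_id=block_id, content_length=11, version="2021-12-02",
+    #     )
+    #     await client.block_blob.commit_block_list(
+    #         "my-container", "my-blob", {"latest": [block_id]},
+    #         version="2021-12-02",
+    #     )
+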
@distributed_trace_async + async def stage_block_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + block_id: str, + content_length: int, + source_url: str, + source_range: str, + version: str, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Stage Block From URL operation creates a new block to be committed as part of a blob where + the contents are read from a URL. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword block_id: A valid Base64 string value that identifies the block. Prior to encoding, + the string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. Required. + :paramtype block_id: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword source_url: Specify a URL to the copy source. Required. + :paramtype source_url: str + :keyword source_range: Bytes of source data in the specified range. Required. + :paramtype source_range: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_md5: str + :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_crc64: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. 
Specifies the encryption
+         scope to use to encrypt the data provided in the request. If not specified, the request will
+         be encrypted with the root account key. Default value is None.
+        :paramtype encryption_scope: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+         and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+         has been modified since the specified date/time. Default value is None.
+        :paramtype source_if_modified_since: ~datetime.datetime
+        :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+         has not been modified since the specified date/time. Default value is None.
+        :paramtype source_if_unmodified_since: ~datetime.datetime
+        :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching
+         value. Default value is None.
+        :paramtype source_if_match: str
+        :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+         matching value. Default value is None.
+        :paramtype source_if_none_match: str
+        :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+         valid OAuth access token to copy source. Default value is None.
+        :paramtype copy_source_authorization: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_block_blob_stage_block_from_url_request(
+            container_name=container_name,
+            blob=blob,
+            block_id=block_id,
+            content_length=content_length,
+            source_url=source_url,
+            source_range=source_range,
+            version=version,
+            source_content_md5=source_content_md5,
+            source_content_crc64=source_content_crc64,
+            timeout=timeout,
+            encryption_key=encryption_key,
+            encryption_key_sha256=encryption_key_sha256,
+            encryption_algorithm=encryption_algorithm,
+            encryption_scope=encryption_scope,
+            lease_id=lease_id,
+            source_if_modified_since=source_if_modified_since,
+            source_if_unmodified_since=source_if_unmodified_since,
+            source_if_match=source_if_match,
+            source_if_none_match=source_if_none_match,
+            copy_source_authorization=copy_source_authorization,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _deserialize(_models.StorageError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
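+        # Integrity hashes and encryption metadata the service echoes back for
+        # the staged block.
+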
response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @overload + async def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: _models.BlockLookupList, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: JSON, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. 
You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. 
Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. 
+ :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. 
+ :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: Union[_models.BlockLookupList, JSON, IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Is one of the following types: BlockLookupList, JSON, IO[bytes] + Required. + :type blocks: ~azure.storage.blob.models.BlockLookupList or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blocks, (IOBase, bytes)): + _content = blocks + else: + _content = json.dumps(blocks, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_block_blob_commit_block_list_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + blob_cache_control=blob_cache_control, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + 
blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_block_list( + self, + container_name: str, + blob: str, + *, + list_type: Union[str, _models.BlockListType], + version: str, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> _models.BlockLookupList: + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Required. + :paramtype list_type: str or ~azure.storage.blob.models.BlockListType + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. 
Default value is None. + :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :return: BlockLookupList. The BlockLookupList is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.BlockLookupList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlockLookupList] = kwargs.pop("cls", None) + + _request = build_block_blob_get_block_list_request( + container_name=container_name, + blob=blob, + list_type=list_type, + version=version, + snapshot=snapshot, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BlockLookupList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_patch.py similarity index 69% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_patch.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_patch.py index 
71dde502c70f..f7dd32510333 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_patch.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/operations/_patch.py @@ -2,19 +2,13 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ - - """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports +from typing import List - from typing import List -__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/models/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/models/__init__.py new file mode 100644 index 000000000000..72756aaf08cd --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/models/__init__.py @@ -0,0 +1,151 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._models import AccessPolicy +from ._models import ArrowConfiguration +from ._models import ArrowField +from ._models import BlobFlatListSegment +from ._models import BlobHierarchyListSegment +from ._models import BlobItemInternal +from ._models import BlobMetadata +from ._models import BlobName +from ._models import BlobPrefix +from ._models import BlobPropertiesInternal +from ._models import BlobTag +from ._models import BlobTags +from ._models import BlockLookupList +from ._models import ClearRange +from ._models import ContainerItem +from ._models import ContainerMetadata +from ._models import ContainerProperties +from ._models import CorsRule +from ._models import DelimitedTextConfiguration +from ._models import FilterBlobItem +from ._models import FilterBlobSegment +from ._models import GeoReplication +from ._models import JsonTextConfiguration +from ._models import KeyInfo +from ._models import ListBlobsFlatSegmentResponse +from ._models import ListBlobsHierarchySegmentResponse +from ._models import ListContainersSegmentResponse +from ._models import Logging +from ._models import Metrics +from ._models import MetricsProperties +from ._models import MetricsServiceProperties +from ._models import ObjectReplicationMetadata +from ._models import PageList +from ._models import PageRange +from ._models import ParquetConfiguration +from ._models import QueryFormat +from ._models import QueryRequest +from ._models import QuerySerialization +from ._models import RetentionPolicy +from ._models import SignedIdentifier +from ._models import StaticWebsite +from ._models import StorageError +from ._models import StorageServiceProperties +from ._models import StorageServiceStats +from ._models import UserDelegationKey + +from ._enums import AccessTier +from ._enums import AccountKind +from ._enums import ArchiveStatus +from ._enums import BlobExpiryOptions +from ._enums import 
BlobImmutabilityPolicyMode +from ._enums import BlobType +from ._enums import BlockListType +from ._enums import CopyStatus +from ._enums import DeleteSnapshotsOptionType +from ._enums import FilterBlobsIncludes +from ._enums import GeoReplicationStatus +from ._enums import LeaseDuration +from ._enums import LeaseState +from ._enums import LeaseStatus +from ._enums import ListBlobsIncludes +from ._enums import PremiumPageBlobAccessTier +from ._enums import PublicAccessType +from ._enums import QueryRequestTypeSqlOnly +from ._enums import QueryType +from ._enums import RehydratePriority +from ._enums import SequenceNumberActionType +from ._enums import SkuName +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AccessPolicy", + "ArrowConfiguration", + "ArrowField", + "BlobFlatListSegment", + "BlobHierarchyListSegment", + "BlobItemInternal", + "BlobMetadata", + "BlobName", + "BlobPrefix", + "BlobPropertiesInternal", + "BlobTag", + "BlobTags", + "BlockLookupList", + "ClearRange", + "ContainerItem", + "ContainerMetadata", + "ContainerProperties", + "CorsRule", + "DelimitedTextConfiguration", + "FilterBlobItem", + "FilterBlobSegment", + "GeoReplication", + "JsonTextConfiguration", + "KeyInfo", + "ListBlobsFlatSegmentResponse", + "ListBlobsHierarchySegmentResponse", + "ListContainersSegmentResponse", + "Logging", + "Metrics", + "MetricsProperties", + "MetricsServiceProperties", + "ObjectReplicationMetadata", + "PageList", + "PageRange", + "ParquetConfiguration", + "QueryFormat", + "QueryRequest", + "QuerySerialization", + "RetentionPolicy", + "SignedIdentifier", + "StaticWebsite", + "StorageError", + "StorageServiceProperties", + "StorageServiceStats", + "UserDelegationKey", + "AccessTier", + "AccountKind", + "ArchiveStatus", + "BlobExpiryOptions", + "BlobImmutabilityPolicyMode", + "BlobType", + "BlockListType", + "CopyStatus", + "DeleteSnapshotsOptionType", + "FilterBlobsIncludes", + "GeoReplicationStatus", + "LeaseDuration", + "LeaseState", + "LeaseStatus", + "ListBlobsIncludes", + "PremiumPageBlobAccessTier", + "PublicAccessType", + "QueryRequestTypeSqlOnly", + "QueryType", + "RehydratePriority", + "SequenceNumberActionType", + "SkuName", +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/models/_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_enums.py new file mode 100644 index 000000000000..6f1654fe32e9 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_enums.py @@ -0,0 +1,308 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The access tiers."""
+
+    P4 = "P4"
+    """The premium P4 tier."""
+    P6 = "P6"
+    """The premium P6 tier."""
+    P10 = "P10"
+    """The premium P10 tier."""
+    P15 = "P15"
+    """The premium P15 tier."""
+    P20 = "P20"
+    """The premium P20 tier."""
+    P30 = "P30"
+    """The premium P30 tier."""
+    P40 = "P40"
+    """The premium P40 tier."""
+    P50 = "P50"
+    """The premium P50 tier."""
+    P60 = "P60"
+    """The premium P60 tier."""
+    P70 = "P70"
+    """The premium P70 tier."""
+    P80 = "P80"
+    """The premium P80 tier."""
+    HOT = "Hot"
+    """The hot access tier."""
+    COOL = "Cool"
+    """The cool access tier."""
+    ARCHIVE = "Archive"
+    """The archive access tier."""
+
+
+class AccountKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The account kind."""
+
+    STORAGE = "Storage"
+    """The storage account is a general-purpose account."""
+    BLOB_STORAGE = "BlobStorage"
+    """The storage account is a blob storage account."""
+    STORAGE_V2 = "StorageV2"
+    """The storage account is a storage V2 account."""
+    FILE_STORAGE = "FileStorage"
+    """The storage account is a file storage account."""
+    BLOCK_BLOB_STORAGE = "BlockBlobStorage"
+    """The storage account is a block blob storage account."""
+
+
+class ArchiveStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The archive status."""
+
+    REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot"
+    """The archive status is rehydrate pending to hot."""
+    REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool"
+    """The archive status is rehydrate pending to cool."""
+    REHYDRATE_PENDING_TO_ARCHIVE = "rehydrate-pending-to-archive"
+    """The archive status is rehydrate pending to archive."""
+    REHYDRATE_PENDING_TO_EXPIRED = "rehydrate-pending-to-expired"
+    """The archive status is rehydrate pending to expired."""
+
+
+class BlobExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The blob expiration options."""
+
+    NEVER_EXPIRE = "NeverExpire"
+    """Never expire."""
+    RELATIVE_TO_CREATION = "RelativeToCreation"
+    """Relative to creation time."""
+    RELATIVE_TO_NOW = "RelativeToNow"
+    """Relative to now."""
+    ABSOLUTE = "Absolute"
+    """Absolute time."""
+
+
+class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The immutability policy mode."""
+
+    MUTABLE = "Mutable"
+    """The immutability policy is mutable."""
+    LOCKED = "Locked"
+    """The immutability policy is locked."""
+    UNLOCKED = "Unlocked"
+    """The immutability policy is unlocked."""
+
+
+class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The blob type."""
+
+    BLOCK_BLOB = "BlockBlob"
+    """The blob is a block blob."""
+    PAGE_BLOB = "PageBlob"
+    """The blob is a page blob."""
+    APPEND_BLOB = "AppendBlob"
+    """The blob is an append blob."""
+
+
+class BlockListType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The block list types."""
+
+    COMMITTED = "committed"
+    """The list of committed blocks."""
+    UNCOMMITTED = "uncommitted"
+    """The list of uncommitted blocks."""
+    ALL = "all"
+    """Both lists together."""
+
+
+class CopyStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The copy status."""
+
+    PENDING = "pending"
+    """The copy operation is pending."""
+    SUCCESS = "success"
+    """The copy operation succeeded."""
+    FAILED = "failed"
+    """The copy operation failed."""
+    ABORTED = "aborted"
+    """The copy operation was aborted."""
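Because every enum in this file derives from `str` with `CaseInsensitiveEnumMeta` (from `azure.core`), member and attribute lookups ignore case, and members compare equal to their raw wire values. A minimal sketch of what callers can rely on, assuming the standard `azure.core` metaclass semantics:

from azure.storage.blob.models import AccessTier

# Name lookup and attribute access are case-insensitive via the metaclass.
assert AccessTier["hot"] is AccessTier.HOT
assert AccessTier.archive is AccessTier.ARCHIVE

# Members subclass str, so they compare equal to the raw service values
# and can be passed anywhere the API accepts a plain string.
assert AccessTier.HOT == "Hot"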
+
+
+class DeleteSnapshotsOptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The delete snapshots option type."""
+
+    NONE = "none"
+    """No delete snapshots option is specified."""
+    INCLUDE = "include"
+    """Snapshots are included in the delete operation."""
+
+
+class FilterBlobsIncludes(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The filter blobs includes."""
+
+    NONE = "none"
+    """The filter includes no versions."""
+    VERSIONS = "versions"
+    """The filter includes versions."""
+
+
+class GeoReplicationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The geo replication status."""
+
+    LIVE = "live"
+    """The geo replication is live."""
+    BOOTSTRAP = "bootstrap"
+    """The geo replication is bootstrapping."""
+    UNAVAILABLE = "unavailable"
+    """The geo replication is unavailable."""
+
+
+class LeaseDuration(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The lease duration."""
+
+    INFINITE = "infinite"
+    """The lease is of infinite duration."""
+    FIXED = "fixed"
+    """The lease is of fixed duration."""
+
+
+class LeaseState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The lease state."""
+
+    AVAILABLE = "available"
+    """The lease is available."""
+    LEASED = "leased"
+    """The lease is currently leased."""
+    EXPIRED = "expired"
+    """The lease is expired."""
+    BREAKING = "breaking"
+    """The lease is breaking."""
+    BROKEN = "broken"
+    """The lease is broken."""
+
+
+class LeaseStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The lease status."""
+
+    UNLOCKED = "unlocked"
+    """The lease is unlocked."""
+    LOCKED = "locked"
+    """The lease is locked."""
+
+
+class ListBlobsIncludes(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The list blob includes parameter values."""
+
+    COPIES = "copies"
+    """Include copies."""
+    DELETED = "deleted"
+    """Include deleted blobs."""
+    METADATA = "metadata"
+    """Include metadata."""
+    SNAPSHOTS = "snapshots"
+    """Include snapshots."""
+    UNCOMMITTED_BLOBS = "uncommittedblobs"
+    """Include uncommitted blobs."""
+    VERSIONS = "versions"
+    """Include versions."""
+    TAGS = "tags"
+    """Include tags."""
+    IMMUTABILITY_POLICY = "immutabilitypolicy"
+    """Include immutability policy."""
+    LEGAL_HOLD = "legalhold"
+    """Include legal hold."""
+    DELETED_WITH_VERSIONS = "deletedwithversions"
+    """Include deleted blobs with versions."""
+
+
+class PremiumPageBlobAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The premium page blob access tier types."""
+
+    P4 = "P4"
+    """The premium page blob access tier is P4."""
+    P6 = "P6"
+    """The premium page blob access tier is P6."""
+    P10 = "P10"
+    """The premium page blob access tier is P10."""
+    P15 = "P15"
+    """The premium page blob access tier is P15."""
+    P20 = "P20"
+    """The premium page blob access tier is P20."""
+    P30 = "P30"
+    """The premium page blob access tier is P30."""
+    P40 = "P40"
+    """The premium page blob access tier is P40."""
+    P50 = "P50"
+    """The premium page blob access tier is P50."""
+    P60 = "P60"
+    """The premium page blob access tier is P60."""
+    P70 = "P70"
+    """The premium page blob access tier is P70."""
+    P80 = "P80"
+    """The premium page blob access tier is P80."""
+
+
+class PublicAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The public access types."""
+
+    BLOB = "blob"
+    """Blob access."""
+    CONTAINER = "container"
+    """Container access."""
+
+
+class QueryRequestTypeSqlOnly(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The query request type; only SQL is supported."""
+
+    SQL = "SQL"
+    """The SQL request query type."""
+
+
+class QueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The query format type."""
+
+    DELIMITED = "delimited"
+    """The query format type is delimited."""
+    JSON = "json"
+    """The query format type is JSON."""
+    ARROW = "arrow"
+    """The query format type is Apache Arrow."""
+    PARQUET = "parquet"
+    """The query format type is Parquet."""
+
+
+class RehydratePriority(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The rehydrate priority."""
+
+    HIGH = "High"
+    """The rehydrate priority is high."""
+    STANDARD = "Standard"
+    """The rehydrate priority is standard."""
+
+
+class SequenceNumberActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The sequence number actions."""
+
+    INCREMENT = "increment"
+    """Increment the sequence number."""
+    MAX = "max"
+    """Set the maximum for the sequence number."""
+    UPDATE = "update"
+    """Update the sequence number."""
+
+
+class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The SKU types."""
+
+    STANDARD_LRS = "Standard_LRS"
+    """The standard LRS SKU."""
+    STANDARD_GRS = "Standard_GRS"
+    """The standard GRS SKU."""
+    STANDARD_RAGRS = "Standard_RAGRS"
+    """The standard RAGRS SKU."""
+    STANDARD_ZRS = "Standard_ZRS"
+    """The standard ZRS SKU."""
+    PREMIUM_LRS = "Premium_LRS"
+    """The premium LRS SKU."""
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/models/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_models.py
new file mode 100644
index 000000000000..7f7c4fe97b67
--- /dev/null
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_models.py
@@ -0,0 +1,2044 @@
+# coding=utf-8
+# pylint: disable=too-many-lines
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union, overload
+
+from .. import _model_base
+from .._model_base import rest_field
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from .. import models as _models
+
+
+class AccessPolicy(_model_base.Model):
+    """Represents an access policy.
+
+
+    :ivar start: The date-time the policy is active. Required.
+    :vartype start: ~datetime.datetime
+    :ivar expiry: The date-time the policy expires. Required.
+    :vartype expiry: ~datetime.datetime
+    :ivar permission: The ACL permissions for the policy. Required.
+    :vartype permission: str
+    """
+
+    start: datetime.datetime = rest_field(format="rfc3339")
+    """The date-time the policy is active. Required."""
+    expiry: datetime.datetime = rest_field(format="rfc3339")
+    """The date-time the policy expires. Required."""
+    permission: str = rest_field()
+    """The ACL permissions for the policy. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        start: datetime.datetime,
+        expiry: datetime.datetime,
+        permission: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class ArrowConfiguration(_model_base.Model): + """Represents the Apache Arrow configuration. + + All required parameters must be populated in order to send to server. + + :ivar schema: The Apache Arrow schema. Required. + :vartype schema: list[~azure.storage.blob.models.ArrowField] + """ + + schema: List["_models.ArrowField"] = rest_field() + """The Apache Arrow schema. Required.""" + + @overload + def __init__( + self, + *, + schema: List["_models.ArrowField"], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class ArrowField(_model_base.Model): + """Represents an Apache Arrow field. + + All required parameters must be populated in order to send to server. + + :ivar type: The arrow field type. Required. + :vartype type: str + :ivar name: The arrow field name. + :vartype name: str + :ivar precision: The arrow field precision. + :vartype precision: int + :ivar scale: The arrow field scale. + :vartype scale: int + """ + + type: str = rest_field() + """The arrow field type. Required.""" + name: Optional[str] = rest_field() + """The arrow field name.""" + precision: Optional[int] = rest_field() + """The arrow field precision.""" + scale: Optional[int] = rest_field() + """The arrow field scale.""" + + @overload + def __init__( + self, + *, + type: str, + name: Optional[str] = None, + precision: Optional[int] = None, + scale: Optional[int] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BlobFlatListSegment(_model_base.Model): + """The blob flat list segment. + + + :ivar blob_items: The blob items. Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + blob_items: List["_models.BlobItemInternal"] = rest_field(name="blobItems") + """The blob items. Required.""" + + @overload + def __init__( + self, + *, + blob_items: List["_models.BlobItemInternal"], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BlobHierarchyListSegment(_model_base.Model): + """Represents an array of blobs. + + + :ivar blob_items: The blob items. Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + :ivar blob_prefixes: The blob prefixes. Required. + :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + """ + + blob_items: List["_models.BlobItemInternal"] = rest_field(name="blobItems") + """The blob items. Required.""" + blob_prefixes: List["_models.BlobPrefix"] = rest_field(name="blobPrefixes") + """The blob prefixes. 
+    Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        blob_items: List["_models.BlobItemInternal"],
+        blob_prefixes: List["_models.BlobPrefix"],
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class BlobItemInternal(_model_base.Model):
+    """An Azure Storage Blob.
+
+
+    :ivar name: The name of the blob. Required.
+    :vartype name: ~azure.storage.blob.models.BlobName
+    :ivar deleted: Whether the blob is deleted. Required.
+    :vartype deleted: bool
+    :ivar snapshot: The snapshot of the blob. Required.
+    :vartype snapshot: str
+    :ivar version_id: The version id of the blob.
+    :vartype version_id: str
+    :ivar is_current_version: Whether the blob is the current version.
+    :vartype is_current_version: bool
+    :ivar properties: The properties of the blob. Required.
+    :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal
+    :ivar metadata: The metadata of the blob.
+    :vartype metadata: ~azure.storage.blob.models.BlobMetadata
+    :ivar blob_tags: The tags of the blob.
+    :vartype blob_tags: list[~azure.storage.blob.models.BlobTag]
+    :ivar object_replication_metadata: The object replication metadata of the blob.
+    :vartype object_replication_metadata: ~azure.storage.blob.models.ObjectReplicationMetadata
+    :ivar has_versions_only: Whether the blob has versions only.
+    :vartype has_versions_only: bool
+    """
+
+    name: "_models.BlobName" = rest_field()
+    """The name of the blob. Required."""
+    deleted: bool = rest_field()
+    """Whether the blob is deleted. Required."""
+    snapshot: str = rest_field()
+    """The snapshot of the blob. Required."""
+    version_id: Optional[str] = rest_field(name="versionId")
+    """The version id of the blob."""
+    is_current_version: Optional[bool] = rest_field(name="isCurrentVersion")
+    """Whether the blob is the current version."""
+    properties: "_models.BlobPropertiesInternal" = rest_field()
+    """The properties of the blob. Required."""
+    metadata: Optional["_models.BlobMetadata"] = rest_field()
+    """The metadata of the blob."""
+    blob_tags: Optional[List["_models.BlobTag"]] = rest_field(name="blobTags")
+    """The tags of the blob."""
+    object_replication_metadata: Optional["_models.ObjectReplicationMetadata"] = rest_field(
+        name="objectReplicationMetadata"
+    )
+    """The object replication metadata of the blob."""
+    has_versions_only: Optional[bool] = rest_field(name="hasVersionsOnly")
+    """Whether the blob has versions only."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: "_models.BlobName",
+        deleted: bool,
+        snapshot: str,
+        properties: "_models.BlobPropertiesInternal",
+        version_id: Optional[str] = None,
+        is_current_version: Optional[bool] = None,
+        metadata: Optional["_models.BlobMetadata"] = None,
+        blob_tags: Optional[List["_models.BlobTag"]] = None,
+        object_replication_metadata: Optional["_models.ObjectReplicationMetadata"] = None,
+        has_versions_only: Optional[bool] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
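Each generated model exposes the two constructor overloads shown above: keyword arguments using the pythonic field names, or a single raw JSON mapping keyed by the wire names declared via `rest_field`. A minimal sketch with `ArrowField`, assuming the usual `_model_base.Model` behavior (the generated docstrings elsewhere in this diff note that these models are MutableMapping-compatible):

from azure.storage.blob.models import ArrowField

# Keyword form: pythonic attribute names.
field = ArrowField(type="int64", name="id", precision=None, scale=None)
assert field.name == "id"

# Mapping form: raw JSON keyed by wire names, as received from the service.
same_field = ArrowField({"type": "int64", "name": "id"})
assert same_field["type"] == "int64"  # models also support dict-style access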
+
+
+class BlobMetadata(_model_base.Model):
+    """The blob metadata.
+
+
+    :ivar encrypted: Whether the blob metadata is encrypted. Required.
+    :vartype encrypted: str
+    """
+
+    encrypted: str = rest_field()
+    """Whether the blob metadata is encrypted. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        encrypted: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class BlobName(_model_base.Model):
+    """Represents a blob name.
+
+
+    :ivar encoded: Whether the blob name is encoded. Required.
+    :vartype encoded: bool
+    :ivar content: The blob name. Required.
+    :vartype content: str
+    """
+
+    encoded: bool = rest_field()
+    """Whether the blob name is encoded. Required."""
+    content: str = rest_field()
+    """The blob name. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        encoded: bool,
+        content: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class BlobPrefix(_model_base.Model):
+    """Represents a blob prefix.
+
+
+    :ivar name: The blob name. Required.
+    :vartype name: ~azure.storage.blob.models.BlobName
+    """
+
+    name: "_models.BlobName" = rest_field()
+    """The blob name. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: "_models.BlobName",
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
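The `rest_field` descriptors carry the wire mapping for every model: `name=` binds a pythonic attribute to its camelCase JSON key, and `format=` controls datetime (de)serialization ("rfc7231" for the HTTP-date properties in the `BlobPropertiesInternal` class that follows). A sketch of the round-trip this implies, assuming the generated runtime's lazy deserialization on attribute access:

from datetime import datetime
from azure.storage.blob.models import BlobPropertiesInternal

# Wire payload uses camelCase keys and RFC 7231 (RFC 1123) date strings.
props = BlobPropertiesInternal({
    "lastModified": "Thu, 01 Jan 2015 00:00:00 GMT",
    "eTag": "0x8D0000000000000",
})
assert isinstance(props.last_modified, datetime)  # parsed via format="rfc7231"
assert props.e_tag == "0x8D0000000000000"         # mapped via name="eTag"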
Known values are: "available", "leased", + "expired", "breaking", and "broken". + :vartype lease_state: str or ~azure.storage.blob.models.LeaseState + :ivar lease_duration: The lease duration of the blob. Known values are: "infinite" and "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDuration + :ivar copy_id: The copy ID of the blob. + :vartype copy_id: str + :ivar copy_status: The copy status of the blob. Known values are: "pending", "success", + "failed", and "aborted". + :vartype copy_status: str or ~azure.storage.blob.models.CopyStatus + :ivar copy_source: The copy source of the blob. + :vartype copy_source: str + :ivar copy_progress: The copy progress of the blob. + :vartype copy_progress: str + :ivar copy_completion_time: The copy completion time of the blob. + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: The copy status description of the blob. + :vartype copy_status_description: str + :ivar server_encrypted: Whether the blog is encrypted on the server. + :vartype server_encrypted: bool + :ivar incremental_copy: Whether the blog is incremental copy. + :vartype incremental_copy: bool + :ivar destination_snapshot: The name of the desination snapshot. + :vartype destination_snapshot: str + :ivar deleted_time: The time the blob was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: The remaining retention days of the blob. + :vartype remaining_retention_days: int + :ivar access_tier: The access tier of the blob. Known values are: "P4", "P6", "P10", "P15", + "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". + :vartype access_tier: str or ~azure.storage.blob.models.AccessTier + :ivar access_tier_inferred: Whether the access tier is inferred. + :vartype access_tier_inferred: bool + :ivar archive_status: The archive status of the blob. Known values are: + "rehydrate-pending-to-hot", "rehydrate-pending-to-cool", "rehydrate-pending-to-archive", and + "rehydrate-pending-to-expired". + :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar encryption_scope: The encryption scope of the blob. + :vartype encryption_scope: str + :ivar access_tier_change_time: The access tier change time of the blob. + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: The number of tags for the blob. + :vartype tag_count: int + :ivar expires_on: The expire time of the blob. + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: Whether the blob is sealed. + :vartype is_sealed: bool + :ivar rehydrate_priority: The rehydrate priority of the blob. Known values are: "High" and + "Standard". + :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :ivar last_access_time: The last access time of the blob. + :vartype last_access_time: ~datetime.datetime + :ivar immutability_policy_expires_on: The immutability policy until time of the blob. + :vartype immutability_policy_expires_on: ~datetime.datetime + :ivar immutability_policy_mode: The immutability policy mode of the blob. Known values are: + "Mutable", "Locked", and "Unlocked". + :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :ivar legal_hold: Whether the blob is under legal hold. 
+ :vartype legal_hold: bool + """ + + creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", format="rfc7231") + """The date-time the blob was created in RFC1123 format.""" + last_modified: datetime.datetime = rest_field(name="lastModified", format="rfc7231") + """The date-time the blob was last modified in RFC1123 format. Required.""" + e_tag: str = rest_field(name="eTag") + """The blog ETag. Required.""" + content_length: Optional[int] = rest_field(name="contentLength") + """The content length of the blob.""" + content_type: Optional[str] = rest_field(name="contentType") + """The content type of the blob.""" + content_encoding: Optional[str] = rest_field(name="contentEncoding") + """The content encoding of the blob.""" + content_language: Optional[str] = rest_field(name="contentLanguage") + """The content language of the blob.""" + content_md5: Optional[str] = rest_field(name="contentMd5") + """The content MD5 of the blob.""" + content_disposition: Optional[str] = rest_field(name="contentDisposition") + """The content disposition of the blob.""" + cache_control: Optional[str] = rest_field(name="cacheControl") + """The cache control of the blob.""" + blob_sequence_number: Optional[int] = rest_field(name="blobSequenceNumber") + """The sequence number of the blob.""" + blob_type: Optional[Union[str, "_models.BlobType"]] = rest_field(name="blobType") + """The blob type. Known values are: \"BlockBlob\", \"PageBlob\", and \"AppendBlob\".""" + lease_status: Optional[Union[str, "_models.LeaseStatus"]] = rest_field(name="leaseStatus") + """The lease status of the blob. Known values are: \"unlocked\" and \"locked\".""" + lease_state: Optional[Union[str, "_models.LeaseState"]] = rest_field(name="leaseState") + """The lease state of the blob. Known values are: \"available\", \"leased\", \"expired\", + \"breaking\", and \"broken\".""" + lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = rest_field(name="leaseDuration") + """The lease duration of the blob. Known values are: \"infinite\" and \"fixed\".""" + copy_id: Optional[str] = rest_field(name="copyId") + """The copy ID of the blob.""" + copy_status: Optional[Union[str, "_models.CopyStatus"]] = rest_field(name="copyStatus") + """The copy status of the blob. 
Known values are: \"pending\", \"success\", \"failed\", and + \"aborted\".""" + copy_source: Optional[str] = rest_field(name="copySource") + """The copy source of the blob.""" + copy_progress: Optional[str] = rest_field(name="copyProgress") + """The copy progress of the blob.""" + copy_completion_time: Optional[datetime.datetime] = rest_field(name="copyCompletionTime", format="rfc7231") + """The copy completion time of the blob.""" + copy_status_description: Optional[str] = rest_field(name="copyStatusDescription") + """The copy status description of the blob.""" + server_encrypted: Optional[bool] = rest_field(name="serverEncrypted") + """Whether the blog is encrypted on the server.""" + incremental_copy: Optional[bool] = rest_field(name="incrementalCopy") + """Whether the blog is incremental copy.""" + destination_snapshot: Optional[str] = rest_field(name="destinationSnapshot") + """The name of the desination snapshot.""" + deleted_time: Optional[datetime.datetime] = rest_field(name="deletedTime", format="rfc7231") + """The time the blob was deleted.""" + remaining_retention_days: Optional[int] = rest_field(name="remainingRetentionDays") + """The remaining retention days of the blob.""" + access_tier: Optional[Union[str, "_models.AccessTier"]] = rest_field(name="accessTier") + """The access tier of the blob. Known values are: \"P4\", \"P6\", \"P10\", \"P15\", \"P20\", + \"P30\", \"P40\", \"P50\", \"P60\", \"P70\", \"P80\", \"Hot\", \"Cool\", and \"Archive\".""" + access_tier_inferred: Optional[bool] = rest_field(name="accessTierInferred") + """Whether the access tier is inferred.""" + archive_status: Optional[Union[str, "_models.ArchiveStatus"]] = rest_field(name="archiveStatus") + """The archive status of the blob. Known values are: \"rehydrate-pending-to-hot\", + \"rehydrate-pending-to-cool\", \"rehydrate-pending-to-archive\", and + \"rehydrate-pending-to-expired\".""" + encryption_scope: Optional[str] = rest_field(name="encryptionScope") + """The encryption scope of the blob.""" + access_tier_change_time: Optional[datetime.datetime] = rest_field(name="accessTierChangeTime", format="rfc7231") + """The access tier change time of the blob.""" + tag_count: Optional[int] = rest_field(name="tagCount") + """The number of tags for the blob.""" + expires_on: Optional[datetime.datetime] = rest_field(name="expiryTime", format="rfc7231") + """The expire time of the blob.""" + is_sealed: Optional[bool] = rest_field(name="sealed") + """Whether the blob is sealed.""" + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = rest_field(name="rehydratePriority") + """The rehydrate priority of the blob. Known values are: \"High\" and \"Standard\".""" + last_access_time: Optional[datetime.datetime] = rest_field(name="lastAccessTime", format="rfc7231") + """The last access time of the blob.""" + immutability_policy_expires_on: Optional[datetime.datetime] = rest_field( + name="immutabilityPolicyUntilDate", format="rfc7231" + ) + """The immutability policy until time of the blob.""" + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = rest_field( + name="immutabilityPolicyMode" + ) + """The immutability policy mode of the blob. 
Known values are: \"Mutable\", \"Locked\", and + \"Unlocked\".""" + legal_hold: Optional[bool] = rest_field(name="legalHold") + """Whether the blob is under legal hold.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + last_modified: datetime.datetime, + e_tag: str, + creation_time: Optional[datetime.datetime] = None, + content_length: Optional[int] = None, + content_type: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_md5: Optional[str] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_type: Optional[Union[str, "_models.BlobType"]] = None, + lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None, + lease_state: Optional[Union[str, "_models.LeaseState"]] = None, + lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None, + copy_id: Optional[str] = None, + copy_status: Optional[Union[str, "_models.CopyStatus"]] = None, + copy_source: Optional[str] = None, + copy_progress: Optional[str] = None, + copy_completion_time: Optional[datetime.datetime] = None, + copy_status_description: Optional[str] = None, + server_encrypted: Optional[bool] = None, + incremental_copy: Optional[bool] = None, + destination_snapshot: Optional[str] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + access_tier: Optional[Union[str, "_models.AccessTier"]] = None, + access_tier_inferred: Optional[bool] = None, + archive_status: Optional[Union[str, "_models.ArchiveStatus"]] = None, + encryption_scope: Optional[str] = None, + access_tier_change_time: Optional[datetime.datetime] = None, + tag_count: Optional[int] = None, + expires_on: Optional[datetime.datetime] = None, + is_sealed: Optional[bool] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + last_access_time: Optional[datetime.datetime] = None, + immutability_policy_expires_on: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BlobTag(_model_base.Model): + """The blob tags. + + + :ivar key: The key of the tag. Required. + :vartype key: str + :ivar value: The value of the tag. Required. + :vartype value: str + """ + + key: str = rest_field() + """The key of the tag. Required.""" + value: str = rest_field() + """The value of the tag. Required.""" + + @overload + def __init__( + self, + *, + key: str, + value: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BlobTags(_model_base.Model): + """Represents blob tags. + + + :ivar blob_tag_set: Represents the blob tags. Required. + :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + + blob_tag_set: List["_models.BlobTag"] = rest_field(name="blobTagSet") + """Represents the blob tags. 
Required.""" + + @overload + def __init__( + self, + *, + blob_tag_set: List["_models.BlobTag"], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BlockLookupList(_model_base.Model): + """The Block lookup list. + + + :ivar committed: The committed blocks. Required. + :vartype committed: list[str] + :ivar uncommitted: The uncommitted blocks. Required. + :vartype uncommitted: list[str] + :ivar latest: The latest blocks. Required. + :vartype latest: list[str] + """ + + committed: List[str] = rest_field() + """The committed blocks. Required.""" + uncommitted: List[str] = rest_field() + """The uncommitted blocks. Required.""" + latest: List[str] = rest_field() + """The latest blocks. Required.""" + + @overload + def __init__( + self, + *, + committed: List[str], + uncommitted: List[str], + latest: List[str], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class ClearRange(_model_base.Model): + """The clear range. + + + :ivar start: The start of the byte range. Required. + :vartype start: int + :ivar end: The end of the byte range. Required. + :vartype end: int + """ + + start: int = rest_field() + """The start of the byte range. Required.""" + end: int = rest_field() + """The end of the byte range. Required.""" + + @overload + def __init__( + self, + *, + start: int, + end: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class ContainerItem(_model_base.Model): + """An Azure Storage container. + + + :ivar name: The name of the container. Required. + :vartype name: str + :ivar delete: Whether the container is deleted. + :vartype delete: bool + :ivar version: The version of the container. + :vartype version: str + :ivar properties: The properties of the container. Required. + :vartype properties: ~azure.storage.blob.models.ContainerProperties + :ivar metadata: The metadata of the container. + :vartype metadata: ~azure.storage.blob.models.ContainerMetadata + """ + + name: str = rest_field() + """The name of the container. Required.""" + delete: Optional[bool] = rest_field() + """Whether the container is deleted.""" + version: Optional[str] = rest_field() + """The version of the container.""" + properties: "_models.ContainerProperties" = rest_field() + """The properties of the container. Required.""" + metadata: Optional["_models.ContainerMetadata"] = rest_field() + """The metadata of the container.""" + + @overload + def __init__( + self, + *, + name: str, + properties: "_models.ContainerProperties", + delete: Optional[bool] = None, + version: Optional[str] = None, + metadata: Optional["_models.ContainerMetadata"] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ContainerMetadata(_model_base.Model):
+    """The metadata of a container.
+
+
+    :ivar encrypted: Whether the metadata is encrypted. Required.
+    :vartype encrypted: str
+    """
+
+    encrypted: str = rest_field()
+    """Whether the metadata is encrypted. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        encrypted: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ContainerProperties(_model_base.Model):  # pylint: disable=too-many-instance-attributes
+    """The properties of a container.
+
+
+    :ivar creation_time: The date-time the container was created in RFC1123 format.
+    :vartype creation_time: ~datetime.datetime
+    :ivar last_modified: The date-time the container was last modified in RFC1123 format. Required.
+    :vartype last_modified: ~datetime.datetime
+    :ivar e_tag: The ETag of the container. Required.
+    :vartype e_tag: str
+    :ivar lease_status: The lease status of the container. Known values are: "unlocked" and
+     "locked".
+    :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatus
+    :ivar lease_state: The lease state of the container. Known values are: "available", "leased",
+     "expired", "breaking", and "broken".
+    :vartype lease_state: str or ~azure.storage.blob.models.LeaseState
+    :ivar lease_duration: The lease duration of the container. Known values are: "infinite" and
+     "fixed".
+    :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDuration
+    :ivar public_access: The public access type of the container. Known values are: "blob" and
+     "container".
+    :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType
+    :ivar has_immutability_policy: Whether the container has an immutability policy.
+    :vartype has_immutability_policy: bool
+    :ivar default_encryption_scope: The default encryption scope of the container.
+    :vartype default_encryption_scope: str
+    :ivar prevent_encryption_scope_override: Whether to prevent encryption scope override.
+    :vartype prevent_encryption_scope_override: bool
+    :ivar has_legal_hold: Whether the container has a legal hold.
+    :vartype has_legal_hold: bool
+    :ivar deleted_time: The deleted time of the container.
+    :vartype deleted_time: ~datetime.datetime
+    :ivar remaining_retention_days: The remaining retention days of the container.
+    :vartype remaining_retention_days: int
+    :ivar is_immutable_storage_with_versioning_enabled: Whether immutable storage with versioning
+     is enabled.
+    :vartype is_immutable_storage_with_versioning_enabled: bool
+    """
+
+    creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", format="rfc7231")
+    """The date-time the container was created in RFC1123 format."""
+    last_modified: datetime.datetime = rest_field(name="lastModified", format="rfc7231")
+    """The date-time the container was last modified in RFC1123 format. Required."""
+    e_tag: str = rest_field(name="eTag")
+    """The ETag of the container. Required."""
+    lease_status: Optional[Union[str, "_models.LeaseStatus"]] = rest_field(name="leaseStatus")
+    """The lease status of the container. Known values are: \"unlocked\" and \"locked\"."""
+    lease_state: Optional[Union[str, "_models.LeaseState"]] = rest_field(name="leaseState")
+    """The lease state of the container. Known values are: \"available\", \"leased\", \"expired\",
+    \"breaking\", and \"broken\"."""
+    lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = rest_field(name="leaseDuration")
+    """The lease duration of the container. Known values are: \"infinite\" and \"fixed\"."""
+    public_access: Optional[Union[str, "_models.PublicAccessType"]] = rest_field(name="publicAccess")
+    """The public access type of the container. Known values are: \"blob\" and \"container\"."""
+    has_immutability_policy: Optional[bool] = rest_field(name="hasImmutabilityPolicy")
+    """Whether the container has an immutability policy."""
+    default_encryption_scope: Optional[str] = rest_field(name="defaultEncryptionScope")
+    """The default encryption scope of the container."""
+    prevent_encryption_scope_override: Optional[bool] = rest_field(name="denyEncryptionScopeOverride")
+    """Whether to prevent encryption scope override."""
+    has_legal_hold: Optional[bool] = rest_field(name="hasLegalHold")
+    """Whether the container has a legal hold."""
+    deleted_time: Optional[datetime.datetime] = rest_field(name="deletedTime", format="rfc7231")
+    """The deleted time of the container."""
+    remaining_retention_days: Optional[int] = rest_field(name="remainingRetentionDays")
+    """The remaining retention days of the container."""
+    is_immutable_storage_with_versioning_enabled: Optional[bool] = rest_field(
+        name="immutableStorageWithVersioningEnabled"
+    )
+    """Whether immutable storage with versioning is enabled."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        last_modified: datetime.datetime,
+        e_tag: str,
+        creation_time: Optional[datetime.datetime] = None,
+        lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None,
+        lease_state: Optional[Union[str, "_models.LeaseState"]] = None,
+        lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None,
+        public_access: Optional[Union[str, "_models.PublicAccessType"]] = None,
+        has_immutability_policy: Optional[bool] = None,
+        default_encryption_scope: Optional[str] = None,
+        prevent_encryption_scope_override: Optional[bool] = None,
+        has_legal_hold: Optional[bool] = None,
+        deleted_time: Optional[datetime.datetime] = None,
+        remaining_retention_days: Optional[int] = None,
+        is_immutable_storage_with_versioning_enabled: Optional[bool] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class CorsRule(_model_base.Model):
+    """The CORS rule.
+
+
+    :ivar allowed_origins: The allowed origins. Required.
+    :vartype allowed_origins: str
+    :ivar allowed_methods: The allowed methods. Required.
+    :vartype allowed_methods: str
+    :ivar allowed_headers: The allowed headers. Required.
+    :vartype allowed_headers: str
+    :ivar exposed_headers: The exposed headers. Required.
+    :vartype exposed_headers: str
+    :ivar max_age_in_seconds: The maximum age in seconds. Required.
+    :vartype max_age_in_seconds: int
+    """
+
+    allowed_origins: str = rest_field(name="allowedOrigins")
+    """The allowed origins. Required."""
+    allowed_methods: str = rest_field(name="allowedMethods")
+    """The allowed methods. Required."""
+    allowed_headers: str = rest_field(name="allowedHeaders")
+    """The allowed headers. Required."""
+    exposed_headers: str = rest_field(name="exposedHeaders")
+    """The exposed headers. Required."""
+    max_age_in_seconds: int = rest_field(name="maxAgeInSeconds")
+    """The maximum age in seconds. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        allowed_origins: str,
+        allowed_methods: str,
+        allowed_headers: str,
+        exposed_headers: str,
+        max_age_in_seconds: int,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class DelimitedTextConfiguration(_model_base.Model):
+    """Represents the delimited text configuration.
+
+    :ivar column_separator: The string used to separate columns.
+    :vartype column_separator: str
+    :ivar field_quote: The string used to quote a specific field.
+    :vartype field_quote: str
+    :ivar record_separator: The string used to separate records.
+    :vartype record_separator: str
+    :ivar escape_char: The string used to escape a quote character in a field.
+    :vartype escape_char: str
+    :ivar headers_present: Represents whether the data has headers.
+    :vartype headers_present: bool
+    """
+
+    column_separator: Optional[str] = rest_field(name="columnSeparator")
+    """The string used to separate columns."""
+    field_quote: Optional[str] = rest_field(name="fieldQuote")
+    """The string used to quote a specific field."""
+    record_separator: Optional[str] = rest_field(name="recordSeparator")
+    """The string used to separate records."""
+    escape_char: Optional[str] = rest_field(name="escapeChar")
+    """The string used to escape a quote character in a field."""
+    headers_present: Optional[bool] = rest_field(name="headersPresent")
+    """Represents whether the data has headers."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        column_separator: Optional[str] = None,
+        field_quote: Optional[str] = None,
+        record_separator: Optional[str] = None,
+        escape_char: Optional[str] = None,
+        headers_present: Optional[bool] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class FilterBlobItem(_model_base.Model):
+    """The filter blob item.
+
+
+    :ivar name: The name of the blob. Required.
+    :vartype name: str
+    :ivar container_name: The name of the container. Required.
+    :vartype container_name: str
+    :ivar tags: The tags of the blob.
+    :vartype tags: ~azure.storage.blob.models.BlobTags
+    :ivar version_id: The version ID of the blob.
+    :vartype version_id: str
+    :ivar is_current_version: Whether it is the current version of the blob.
+    :vartype is_current_version: bool
+    """
+
+    name: str = rest_field()
+    """The name of the blob. Required."""
+    container_name: str = rest_field(name="containerName")
+    """The name of the container. Required."""
+    tags: Optional["_models.BlobTags"] = rest_field()
+    """The tags of the blob."""
+    version_id: Optional[str] = rest_field(name="versionId")
+    """The version ID of the blob."""
+    is_current_version: Optional[bool] = rest_field(name="isCurrentVersion")
+    """Whether it is the current version of the blob."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        container_name: str,
+        tags: Optional["_models.BlobTags"] = None,
+        version_id: Optional[str] = None,
+        is_current_version: Optional[bool] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class FilterBlobSegment(_model_base.Model):
+    """The result of a Filter Blobs API call.
+
+
+    :ivar service_endpoint: The service endpoint. Required.
+    :vartype service_endpoint: str
+    :ivar where: The filter for the blobs. Required.
+    :vartype where: str
+    :ivar blobs: The blob segment. Required.
+    :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem]
+    :ivar next_marker: The next marker of the blobs.
+    :vartype next_marker: str
+    """
+
+    service_endpoint: str = rest_field(name="serviceEndpoint")
+    """The service endpoint. Required."""
+    where: str = rest_field()
+    """The filter for the blobs. Required."""
+    blobs: List["_models.FilterBlobItem"] = rest_field()
+    """The blob segment. Required."""
+    next_marker: Optional[str] = rest_field(name="nextMarker")
+    """The next marker of the blobs."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        service_endpoint: str,
+        where: str,
+        blobs: List["_models.FilterBlobItem"],
+        next_marker: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class GeoReplication(_model_base.Model):
+    """The geo-replication status.
+
+
+    :ivar status: The geo-replication status. Required. Known values are: "live", "bootstrap", and
+     "unavailable".
+    :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatus
+    :ivar last_sync_time: The last sync time. Required.
+    :vartype last_sync_time: str
+    """
+
+    status: Union[str, "_models.GeoReplicationStatus"] = rest_field()
+    """The geo-replication status. Required. Known values are: \"live\", \"bootstrap\", and
+    \"unavailable\"."""
+    last_sync_time: str = rest_field(name="lastSyncTime")
+    """The last sync time. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        status: Union[str, "_models.GeoReplicationStatus"],
+        last_sync_time: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class JsonTextConfiguration(_model_base.Model):
+    """Represents the JSON text configuration.
+
+    :ivar record_separator: The string used to separate records.
+    :vartype record_separator: str
+    """
+
+    record_separator: Optional[str] = rest_field(name="recordSeparator")
+    """The string used to separate records."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        record_separator: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class KeyInfo(_model_base.Model):
+    """Key information.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar start: The date-time the key is active. Required.
+    :vartype start: str
+    :ivar expiry: The date-time the key expires. Required.
+    :vartype expiry: str
+    """
+
+    start: str = rest_field()
+    """The date-time the key is active. Required."""
+    expiry: str = rest_field()
+    """The date-time the key expires. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        start: str,
+        expiry: str,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ListBlobsFlatSegmentResponse(_model_base.Model):
+    """An enumeration of blobs.
+
+
+    :ivar service_endpoint: The service endpoint. Required.
+    :vartype service_endpoint: str
+    :ivar container_name: The container name. Required.
+    :vartype container_name: str
+    :ivar prefix: The prefix of the blobs.
+    :vartype prefix: str
+    :ivar marker: The marker of the blobs.
+    :vartype marker: str
+    :ivar max_results: The max results of the blobs.
+    :vartype max_results: int
+    :ivar segment: The blob segment. Required.
+    :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment
+    :ivar next_marker: The next marker of the blobs.
+    :vartype next_marker: str
+    """
+
+    service_endpoint: str = rest_field(name="serviceEndpoint")
+    """The service endpoint. Required."""
+    container_name: str = rest_field(name="containerName")
+    """The container name. Required."""
+    prefix: Optional[str] = rest_field()
+    """The prefix of the blobs."""
+    marker: Optional[str] = rest_field()
+    """The marker of the blobs."""
+    max_results: Optional[int] = rest_field(name="maxResults")
+    """The max results of the blobs."""
+    segment: "_models.BlobFlatListSegment" = rest_field()
+    """The blob segment. Required."""
+    next_marker: Optional[str] = rest_field(name="nextMarker")
+    """The next marker of the blobs."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        service_endpoint: str,
+        container_name: str,
+        segment: "_models.BlobFlatListSegment",
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_results: Optional[int] = None,
+        next_marker: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
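+
+
+# NOTE (editorial, illustrative only -- not emitted by the code generator):
+# the segmented list responses page their results. An empty or missing
+# next_marker means the listing is complete; otherwise the value is passed
+# back as the marker of the following request. A hedged sketch of the
+# consumption loop, assuming list_page is any callable returning a
+# ListBlobsFlatSegmentResponse for a given marker, and that the segment's
+# item collection is reachable via a field on BlobFlatListSegment (the field
+# name below is hypothetical):
+#
+#     marker = None
+#     while True:
+#         page = list_page(marker)
+#         for item in page.segment.blob_items:  # hypothetical field name
+#             handle(item)
+#         if not page.next_marker:
+#             break
+#         marker = page.next_marker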
+
+
+class ListBlobsHierarchySegmentResponse(_model_base.Model):
+    """An enumeration of blobs.
+
+
+    :ivar service_endpoint: The service endpoint. Required.
+    :vartype service_endpoint: str
+    :ivar container_name: The container name. Required.
+    :vartype container_name: str
+    :ivar delimiter: The delimiter of the blobs.
+    :vartype delimiter: str
+    :ivar prefix: The prefix of the blobs.
+    :vartype prefix: str
+    :ivar marker: The marker of the blobs.
+    :vartype marker: str
+    :ivar max_results: The max results of the blobs.
+    :vartype max_results: int
+    :ivar segment: The blob segment. Required.
+    :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment
+    :ivar next_marker: The next marker of the blobs.
+    :vartype next_marker: str
+    """
+
+    service_endpoint: str = rest_field(name="serviceEndpoint")
+    """The service endpoint. Required."""
+    container_name: str = rest_field(name="containerName")
+    """The container name. Required."""
+    delimiter: Optional[str] = rest_field()
+    """The delimiter of the blobs."""
+    prefix: Optional[str] = rest_field()
+    """The prefix of the blobs."""
+    marker: Optional[str] = rest_field()
+    """The marker of the blobs."""
+    max_results: Optional[int] = rest_field(name="maxResults")
+    """The max results of the blobs."""
+    segment: "_models.BlobHierarchyListSegment" = rest_field()
+    """The blob segment. Required."""
+    next_marker: Optional[str] = rest_field(name="nextMarker")
+    """The next marker of the blobs."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        service_endpoint: str,
+        container_name: str,
+        segment: "_models.BlobHierarchyListSegment",
+        delimiter: Optional[str] = None,
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_results: Optional[int] = None,
+        next_marker: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ListContainersSegmentResponse(_model_base.Model):
+    """The list container segment response.
+
+
+    :ivar service_endpoint: The service endpoint. Required.
+    :vartype service_endpoint: str
+    :ivar prefix: The prefix of the containers.
+    :vartype prefix: str
+    :ivar marker: The marker of the containers.
+    :vartype marker: str
+    :ivar max_results: The max results of the containers.
+    :vartype max_results: int
+    :ivar container_items: The container segment. Required.
+    :vartype container_items: list[~azure.storage.blob.models.ContainerItem]
+    :ivar next_marker: The next marker of the containers.
+    :vartype next_marker: str
+    """
+
+    service_endpoint: str = rest_field(name="serviceEndpoint")
+    """The service endpoint. Required."""
+    prefix: Optional[str] = rest_field()
+    """The prefix of the containers."""
+    marker: Optional[str] = rest_field()
+    """The marker of the containers."""
+    max_results: Optional[int] = rest_field(name="maxResults")
+    """The max results of the containers."""
+    container_items: List["_models.ContainerItem"] = rest_field(name="containerItems")
+    """The container segment. Required."""
+    next_marker: Optional[str] = rest_field(name="nextMarker")
+    """The next marker of the containers."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        service_endpoint: str,
+        container_items: List["_models.ContainerItem"],
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_results: Optional[int] = None,
+        next_marker: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class Logging(_model_base.Model):
+    """The logging properties.
+
+
+    :ivar version: The version of the logging properties. Required.
+    :vartype version: str
+    :ivar delete: Whether the delete operation is logged. Required.
+    :vartype delete: bool
+    :ivar read: Whether the read operation is logged. Required.
+    :vartype read: bool
+    :ivar write: Whether the write operation is logged. Required.
+    :vartype write: bool
+    :ivar retention_policy: The retention policy of the logs. Required.
+    :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy
+    """
+
+    version: str = rest_field()
+    """The version of the logging properties. Required."""
+    delete: bool = rest_field()
+    """Whether the delete operation is logged. Required."""
+    read: bool = rest_field()
+    """Whether the read operation is logged. Required."""
+    write: bool = rest_field()
+    """Whether the write operation is logged. Required."""
+    retention_policy: "_models.RetentionPolicy" = rest_field(name="retentionPolicy")
+    """The retention policy of the logs. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        version: str,
+        delete: bool,
+        read: bool,
+        write: bool,
+        retention_policy: "_models.RetentionPolicy",
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class Metrics(_model_base.Model):
+    """The metrics properties.
+
+
+    :ivar version: The version of the metrics properties.
+    :vartype version: str
+    :ivar enabled: Whether the metrics are enabled. Required.
+    :vartype enabled: bool
+    :ivar include_apis: Whether to include APIs in the metrics.
+    :vartype include_apis: bool
+    :ivar retention_policy: The retention policy of the metrics.
+    :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy
+    :ivar service_properties: The service properties of the metrics.
+    :vartype service_properties: ~azure.storage.blob.models.MetricsServiceProperties
+    """
+
+    version: Optional[str] = rest_field()
+    """The version of the metrics properties."""
+    enabled: bool = rest_field()
+    """Whether the metrics are enabled. Required."""
+    include_apis: Optional[bool] = rest_field(name="includeApis")
+    """Whether to include APIs in the metrics."""
+    retention_policy: Optional["_models.RetentionPolicy"] = rest_field(name="retentionPolicy")
+    """The retention policy of the metrics."""
+    service_properties: Optional["_models.MetricsServiceProperties"] = rest_field(name="serviceProperties")
+    """The service properties of the metrics."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        enabled: bool,
+        version: Optional[str] = None,
+        include_apis: Optional[bool] = None,
+        retention_policy: Optional["_models.RetentionPolicy"] = None,
+        service_properties: Optional["_models.MetricsServiceProperties"] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
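+
+
+# NOTE (editorial, illustrative only -- not emitted by the code generator):
+# Metrics nests a RetentionPolicy (defined further down in this module), so
+# a service-properties payload is composed from the inside out. A minimal
+# sketch with made-up values:
+#
+#     metrics = Metrics(
+#         enabled=True,
+#         version="1.0",
+#         include_apis=True,
+#         retention_policy=RetentionPolicy(enabled=True, days=7, allow_permanent_delete=False),
+#     )
+#
+# The same instance can then serve as both hour_metrics and minute_metrics
+# on StorageServiceProperties.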
+
+
+class MetricsProperties(_model_base.Model):
+    """The metrics properties.
+
+
+    :ivar enabled: Whether the metrics are enabled. Required.
+    :vartype enabled: bool
+    :ivar version: The version of the metrics properties. Required.
+    :vartype version: str
+    :ivar retention_policy: The retention policy of the metrics. Required.
+    :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy
+    """
+
+    enabled: bool = rest_field()
+    """Whether the metrics are enabled. Required."""
+    version: str = rest_field()
+    """The version of the metrics properties. Required."""
+    retention_policy: "_models.RetentionPolicy" = rest_field(name="retentionPolicy")
+    """The retention policy of the metrics. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        enabled: bool,
+        version: str,
+        retention_policy: "_models.RetentionPolicy",
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class MetricsServiceProperties(_model_base.Model):
+    """The metrics service properties.
+
+
+    :ivar hour_metrics: The hour metrics properties. Required.
+    :vartype hour_metrics: ~azure.storage.blob.models.MetricsProperties
+    :ivar minute_metrics: The minute metrics properties. Required.
+    :vartype minute_metrics: ~azure.storage.blob.models.MetricsProperties
+    """
+
+    hour_metrics: "_models.MetricsProperties" = rest_field(name="hourMetrics")
+    """The hour metrics properties. Required."""
+    minute_metrics: "_models.MetricsProperties" = rest_field(name="minuteMetrics")
+    """The minute metrics properties. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        hour_metrics: "_models.MetricsProperties",
+        minute_metrics: "_models.MetricsProperties",
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ObjectReplicationMetadata(_model_base.Model):
+    """The object replication metadata."""
+
+
+class PageList(_model_base.Model):
+    """Represents a page list.
+
+
+    :ivar page_range: The page ranges. Required.
+    :vartype page_range: list[~azure.storage.blob.models.PageRange]
+    :ivar clear_range: The clear ranges. Required.
+    :vartype clear_range: list[~azure.storage.blob.models.ClearRange]
+    :ivar next_marker: The next marker.
+    :vartype next_marker: str
+    """
+
+    page_range: List["_models.PageRange"] = rest_field(name="pageRange")
+    """The page ranges. Required."""
+    clear_range: List["_models.ClearRange"] = rest_field(name="clearRange")
+    """The clear ranges. Required."""
+    next_marker: Optional[str] = rest_field(name="nextMarker")
+    """The next marker."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        page_range: List["_models.PageRange"],
+        clear_range: List["_models.ClearRange"],
+        next_marker: Optional[str] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class PageRange(_model_base.Model):
+    """The page range.
+
+
+    :ivar start: The start of the byte range. Required.
+    :vartype start: int
+    :ivar end: The end of the byte range. Required.
+    :vartype end: int
+    """
+
+    start: int = rest_field()
+    """The start of the byte range. Required."""
+    end: int = rest_field()
+    """The end of the byte range. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        start: int,
+        end: int,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class ParquetConfiguration(_model_base.Model):
+    """Represents the Parquet configuration."""
+
+
+class QueryFormat(_model_base.Model):
+    """The query format settings.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar type: The query type. Required. Known values are: "delimited", "json", "arrow", and
+     "parquet".
+    :vartype type: str or ~azure.storage.blob.models.QueryType
+    :ivar delimited_text_configuration: The delimited text configuration.
+    :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration
+    :ivar json_text_configuration: The JSON text configuration.
+    :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration
+    :ivar arrow_configuration: The Apache Arrow configuration.
+    :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration
+    :ivar parquet_configuration: The Parquet configuration.
+    :vartype parquet_configuration: ~azure.storage.blob.models.ParquetConfiguration
+    """
+
+    type: Union[str, "_models.QueryType"] = rest_field()
+    """The query type. Required. Known values are: \"delimited\", \"json\", \"arrow\", and
+    \"parquet\"."""
+    delimited_text_configuration: Optional["_models.DelimitedTextConfiguration"] = rest_field(
+        name="delimitedTextConfiguration"
+    )
+    """The delimited text configuration."""
+    json_text_configuration: Optional["_models.JsonTextConfiguration"] = rest_field(name="jsonTextConfiguration")
+    """The JSON text configuration."""
+    arrow_configuration: Optional["_models.ArrowConfiguration"] = rest_field(name="arrowConfiguration")
+    """The Apache Arrow configuration."""
+    parquet_configuration: Optional["_models.ParquetConfiguration"] = rest_field(name="parquetConfiguration")
+    """The Parquet configuration."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: Union[str, "_models.QueryType"],
+        delimited_text_configuration: Optional["_models.DelimitedTextConfiguration"] = None,
+        json_text_configuration: Optional["_models.JsonTextConfiguration"] = None,
+        arrow_configuration: Optional["_models.ArrowConfiguration"] = None,
+        parquet_configuration: Optional["_models.ParquetConfiguration"] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
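+
+
+# NOTE (editorial, illustrative only -- not emitted by the code generator):
+# QueryFormat selects exactly one of the mutually exclusive configurations
+# via its type field. A hedged sketch of a CSV (delimited) input format,
+# wrapped in the QuerySerialization model defined further down:
+#
+#     csv_format = QueryFormat(
+#         type="delimited",
+#         delimited_text_configuration=DelimitedTextConfiguration(
+#             column_separator=",",
+#             record_separator="\n",
+#             headers_present=True,
+#         ),
+#     )
+#     input_serialization = QuerySerialization(format=csv_format)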
+
+
+class QueryRequest(_model_base.Model):
+    """Groups the set of query request settings.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar query_type: The type of the provided query expression. Required. "SQL"
+    :vartype query_type: str or ~azure.storage.blob.models.QueryRequestTypeSqlOnly
+    :ivar expression: The query expression in SQL. The maximum size of the query expression is
+     256KiB. Required.
+    :vartype expression: str
+    :ivar input_serialization: The input serialization settings.
+    :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization
+    :ivar output_serialization: The output serialization settings.
+    :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization
+    """
+
+    query_type: Union[str, "_models.QueryRequestTypeSqlOnly"] = rest_field(name="queryType")
+    """The type of the provided query expression. Required. \"SQL\""""
+    expression: str = rest_field()
+    """The query expression in SQL. The maximum size of the query expression is 256KiB. Required."""
+    input_serialization: Optional["_models.QuerySerialization"] = rest_field(name="inputSerialization")
+    """The input serialization settings."""
+    output_serialization: Optional["_models.QuerySerialization"] = rest_field(name="outputSerialization")
+    """The output serialization settings."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        query_type: Union[str, "_models.QueryRequestTypeSqlOnly"],
+        expression: str,
+        input_serialization: Optional["_models.QuerySerialization"] = None,
+        output_serialization: Optional["_models.QuerySerialization"] = None,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class QuerySerialization(_model_base.Model):
+    """The query serialization settings.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar format: The query format. Required.
+    :vartype format: ~azure.storage.blob.models.QueryFormat
+    """
+
+    format: "_models.QueryFormat" = rest_field()
+    """The query format. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        format: "_models.QueryFormat",
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class RetentionPolicy(_model_base.Model):
+    """The retention policy.
+
+
+    :ivar enabled: Whether to enable the retention policy. Required.
+    :vartype enabled: bool
+    :ivar days: The number of days to retain the logs. Required.
+    :vartype days: int
+    :ivar allow_permanent_delete: Whether to allow permanent delete. Required.
+    :vartype allow_permanent_delete: bool
+    """
+
+    enabled: bool = rest_field()
+    """Whether to enable the retention policy. Required."""
+    days: int = rest_field()
+    """The number of days to retain the logs. Required."""
+    allow_permanent_delete: bool = rest_field(name="allowPermanentDelete")
+    """Whether to allow permanent delete. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        enabled: bool,
+        days: int,
+        allow_permanent_delete: bool,
+    ): ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]):
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pylint: disable=useless-super-delegation
+        super().__init__(*args, **kwargs)
+
+
+class SignedIdentifier(_model_base.Model):
+    """The signed identifier.
+
+
+    :ivar id: The unique ID for the signed identifier. Required.
+ :vartype id: str + :ivar access_policy: The access policy for the signed identifier. Required. + :vartype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + + id: str = rest_field() + """The unique ID for the signed identifier. Required.""" + access_policy: "_models.AccessPolicy" = rest_field(name="accessPolicy") + """The access policy for the signed identifier. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + access_policy: "_models.AccessPolicy", + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class StaticWebsite(_model_base.Model): + """The static website properties. + + :ivar index_document: The index document. + :vartype index_document: str + :ivar error_document: The error document. + :vartype error_document: str + """ + + index_document: Optional[str] = rest_field(name="indexDocument") + """The index document.""" + error_document: Optional[str] = rest_field(name="errorDocument") + """The error document.""" + + @overload + def __init__( + self, + *, + index_document: Optional[str] = None, + error_document: Optional[str] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class StorageError(_model_base.Model): + """The error response. + + + :ivar message: The error message. Required. + :vartype message: str + """ + + message: str = rest_field(name="Message") + """The error message. Required.""" + + @overload + def __init__( + self, + *, + message: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class StorageServiceProperties(_model_base.Model): + """The service properties. + + :ivar logging: The logging properties. + :vartype logging: ~azure.storage.blob.models.Logging + :ivar hour_metrics: The hour metrics properties. + :vartype hour_metrics: ~azure.storage.blob.models.Metrics + :ivar minute_metrics: The minute metrics properties. + :vartype minute_metrics: ~azure.storage.blob.models.Metrics + :ivar cors: The CORS properties. + :vartype cors: list[~azure.storage.blob.models.CorsRule] + :ivar default_service_version: The default service version. + :vartype default_service_version: str + :ivar delete_retention_policy: The delete retention policy. + :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :ivar static_website: The static website properties. 
+ :vartype static_website: ~azure.storage.blob.models.StaticWebsite + """ + + logging: Optional["_models.Logging"] = rest_field() + """The logging properties.""" + hour_metrics: Optional["_models.Metrics"] = rest_field(name="hourMetrics") + """The hour metrics properties.""" + minute_metrics: Optional["_models.Metrics"] = rest_field(name="minuteMetrics") + """The minute metrics properties.""" + cors: Optional[List["_models.CorsRule"]] = rest_field() + """The CORS properties.""" + default_service_version: Optional[str] = rest_field(name="defaultServiceVersion") + """The default service version.""" + delete_retention_policy: Optional["_models.RetentionPolicy"] = rest_field(name="deleteRetentionPolicy") + """The delete retention policy.""" + static_website: Optional["_models.StaticWebsite"] = rest_field(name="staticWebsite") + """The static website properties.""" + + @overload + def __init__( + self, + *, + logging: Optional["_models.Logging"] = None, + hour_metrics: Optional["_models.Metrics"] = None, + minute_metrics: Optional["_models.Metrics"] = None, + cors: Optional[List["_models.CorsRule"]] = None, + default_service_version: Optional[str] = None, + delete_retention_policy: Optional["_models.RetentionPolicy"] = None, + static_website: Optional["_models.StaticWebsite"] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class StorageServiceStats(_model_base.Model): + """Stats for the storage service. + + + :ivar geo_replication: The geo replication stats. Required. + :vartype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + + geo_replication: "_models.GeoReplication" = rest_field(name="geoReplication") + """The geo replication stats. Required.""" + + @overload + def __init__( + self, + *, + geo_replication: "_models.GeoReplication", + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class UserDelegationKey(_model_base.Model): + """A user delegation key. + + + :ivar signed_oid: The Azure Active Directory object ID in GUID format. Required. + :vartype signed_oid: str + :ivar signed_tid: The Azure Active Directory tenant ID in GUID format. Required. + :vartype signed_tid: str + :ivar signed_start: The date-time the key is active. Required. + :vartype signed_start: str + :ivar signed_expiry: The date-time the key expires. Required. + :vartype signed_expiry: str + :ivar signed_service: Abbreviation of the Azure Storage service that accepts the key. Required. + :vartype signed_service: str + :ivar signed_version: The service version that created the key. Required. + :vartype signed_version: str + :ivar value: The key as a base64 string. Required. + :vartype value: str + """ + + signed_oid: str = rest_field(name="signedOid") + """The Azure Active Directory object ID in GUID format. Required.""" + signed_tid: str = rest_field(name="signedTid") + """The Azure Active Directory tenant ID in GUID format. Required.""" + signed_start: str = rest_field(name="signedStart") + """The date-time the key is active. 
Required.""" + signed_expiry: str = rest_field(name="signedExpiry") + """The date-time the key expires. Required.""" + signed_service: str = rest_field(name="signedService") + """Abbreviation of the Azure Storage service that accepts the key. Required.""" + signed_version: str = rest_field(name="signedVersion") + """The service version that created the key. Required.""" + value: str = rest_field() + """The key as a base64 string. Required.""" + + @overload + def __init__( + self, + *, + signed_oid: str, + signed_tid: str, + signed_start: str, + signed_expiry: str, + signed_service: str, + signed_version: str, + value: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/models/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/__init__.py similarity index 67% rename from sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/__init__.py rename to sdk/storage/azure-storage-blob/azure/storage/blob/operations/__init__.py index 1be05c7aa9a7..9bb9f47cf51b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/__init__.py @@ -2,16 +2,16 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._service_operations import ServiceOperations -from ._container_operations import ContainerOperations -from ._blob_operations import BlobOperations -from ._page_blob_operations import PageBlobOperations -from ._append_blob_operations import AppendBlobOperations -from ._block_blob_operations import BlockBlobOperations +from ._operations import ServiceOperations +from ._operations import ContainerOperations +from ._operations import BlobOperations +from ._operations import PageBlobOperations +from ._operations import AppendBlobOperations +from ._operations import BlockBlobOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_operations.py new file mode 100644 index 000000000000..01c54f2efdb4 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_operations.py @@ -0,0 +1,13888 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from io import IOBase +import json +import sys +from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload + +from azure.core import MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. 
import models as _models +from .._model_base import SdkJSONEncoder, _deserialize +from .._serialization import Serializer +from .._vendor import prep_if_match, prep_if_none_match + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_service_set_properties_request(*, version: str, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?restype=service&comp=properties" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_get_properties_request(*, version: str, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?restype=service&comp=properties" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_get_statistics_request(*, version: str, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?restype=service&comp=stats" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_list_containers_segment_request( # pylint: disable=name-too-long + *, + version: str, + prefix: Optional[str] = None, + timeout: Optional[int] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", 
"application/json") + + # Construct URL + _url = "/?comp=list" + + # Construct parameters + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_get_user_delegation_key_request( # pylint: disable=name-too-long + *, version: str, timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?restype=service&comp=userdelegationkey" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_get_account_info_request(*, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?restype=account&comp=properties" + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_service_submit_batch_request( + *, content_length: int, version: str, timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?comp=batch" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_service_filter_blobs_request( + *, + version: str, + where: Optional[str] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None, + timeout: Optional[int] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?comp=blobs" + + # Construct parameters + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_create_request( + container_name: str, + *, + version: str, + access: Optional[Union[str, _models.PublicAccessType]] = None, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if access is not None: + _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") + if default_encryption_scope is not None: + _headers["x-ms-default-encryption-scope"] = _SERIALIZER.header( + "default_encryption_scope", default_encryption_scope, "str" + ) + if deny_encryption_scope_override is not None: + _headers["x-ms-deny-encryption-scope-override"] = _SERIALIZER.header( + "deny_encryption_scope_override", deny_encryption_scope_override, "bool" + ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_get_properties_request( + container_name: str, *, version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + 
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_delete_request( + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_set_metadata_request( + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=metadata" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_get_access_policy_request( # pylint: disable=name-too-long + container_name: str, *, version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=acl" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_set_access_policy_request( # pylint: disable=name-too-long + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=acl" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_undelete_request( + container_name: str, + *, + version: str, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=undelete" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if deleted_container_name is not None: + _headers["x-ms-deleted-container-name"] = _SERIALIZER.header( + "deleted_container_name", deleted_container_name, "str" + ) + if deleted_container_version is not None: + _headers["x-ms-deleted-container-version"] = _SERIALIZER.header( + "deleted_container_version", deleted_container_version, "str" + ) + _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_rename_request( + container_name: str, + *, + source_container_name: str, + version: str, + source_lease_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=rename" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-source-container-name"] = _SERIALIZER.header("source_container_name", source_container_name, "str") + if source_lease_id is not None: + _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_submit_batch_request( + container_name: str, *, content_length: int, version: str, timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=batch" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_filter_blobs_request( + container_name: str, + *, + version: str, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + where: Optional[str] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=blobs" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if marker is not None: + 
_params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_acquire_lease_request( + container_name: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?comp=lease&restype=container&acquire" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_release_lease_request( + container_name: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?comp=lease&restype=container&release" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + 
_headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_renew_lease_request( + container_name: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?comp=lease&restype=container&renew" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_break_lease_request( + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + break_period: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?comp=lease&restype=container&break" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_change_lease_request( + container_name: str, + *, + lease_id: str, + proposed_lease_id: str, + version: str, + timeout: Optional[int] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?comp=lease&restype=container&change" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_list_blob_flat_segment_request( # pylint: disable=name-too-long + container_name: str, + *, + version: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=list&flat" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long + container_name: str, + *, + delimiter: str, + version: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=container&comp=list&hierarchy" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_container_get_account_info_request(container_name: str, *, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}?restype=account&comp=properties" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_blob_download_request( + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_content_md5: Optional[bool] = None, + range_content_crc64: Optional[bool] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["versionid"] = _SERIALIZER.header("version_id", version_id, "str") + if range is not None: + _headers["x-ms-range"] = 
_SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if range_content_md5 is not None: + _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header("range_content_md5", range_content_md5, "bool") + if range_content_crc64 is not None: + _headers["x-ms-range-get-content-crc64"] = _SERIALIZER.header( + "range_content_crc64", range_content_crc64, "bool" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_get_properties_request( + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + + # Construct headers + _headers["versionid"] = _SERIALIZER.header("version_id", version_id, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if 
encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_delete_request( + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_delete_type: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + if blob_delete_type is not None: + _params["deletetype"] = _SERIALIZER.query("blob_delete_type", blob_delete_type, "str") + + # Construct headers + _headers["versionid"] = _SERIALIZER.header("version_id", version_id, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if delete_snapshots is not None: + _headers["x-ms-delete-snapshots"] = _SERIALIZER.header("delete_snapshots", delete_snapshots, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = 
_SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_undelete_request(container_name: str, blob: str, *, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=undelete" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_blob_set_expiry_request( + container_name: str, + blob: str, + *, + expiry_options: Union[str, _models.BlobExpiryOptions], + expires_on: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=expiry" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") + _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_blob_set_http_headers_request( + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=properties&SetHTTPHeaders" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if blob_cache_control is not None: + 
_headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_set_immutability_policy_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=immutabilityPolicies" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_delete_immutability_policy_request( # pylint: disable=name-too-long + container_name: str, blob: str, *, version: str, timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=immutabilityPolicies" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_set_legal_hold_request( + container_name: str, blob: str, *, legal_hold: bool, version: str, timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=legalhold" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_set_metadata_request( + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=metadata" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not 
None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_acquire_lease_request( + container_name: str, + blob: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=lease&acquire" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_release_lease_request( + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=lease&release" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_renew_lease_request( + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=lease&renew" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = 
_SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_change_lease_request( + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=lease&change" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_break_lease_request( + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=lease&break" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_create_snapshot_request( + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=snapshot" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = 
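_SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, "str")
+        # NOTE: the customer-provided-key headers travel as a set: the AES-256
+        # key, its SHA-256 hash, and the algorithm name. Callers are expected
+        # to pass all three together (an assumption from the Storage
+        # customer-provided-key documentation).
+        _headers["x-ms-encryption-key-sha256"] = 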
_SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_start_copy_from_url_request( + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=copy" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + 
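# NOTE: the x-ms-source-if-* preconditions are evaluated against the *source*
+    # blob of the copy, while the plain If-* / x-ms-if-tags headers further
+    # down gate on the destination blob.
+        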
_headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if seal_blob is not None: + _headers["x-ms-seal-blob"] = _SERIALIZER.header("seal_blob", seal_blob, "bool") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_copy_from_url_request( + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers 
= case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=copy&sync" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if copy_source_tags is not None: + _headers["x-ms-copy-source-tags"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + 
_headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_abort_copy_from_url_request( + container_name: str, + blob: str, + *, + copy_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=copy©id" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-copy-id"] = _SERIALIZER.header("copy_id", copy_id, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_set_tier_request( + container_name: str, + blob: str, + *, + access_tier: Union[str, _models.AccessTier], + version: str, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?comp=tier" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-access-tier"] = _SERIALIZER.header("access_tier", access_tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_get_account_info_request(container_name: str, blob: str, *, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/?restype=account&comp=properties" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": 
_SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_blob_query_request( + container_name: str, + blob: str, + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}/{containerName}/{blob}?comp=query" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_get_tags_request( + container_name: str, + blob: str, + *, + version_id: str, + version: str, + timeout: Optional[int] = None, 
+
+
+def build_blob_get_tags_request(
+    container_name: str,
+    blob: str,
+    *,
+    version_id: str,
+    version: str,
+    timeout: Optional[int] = None,
+    snapshot: Optional[str] = None,
+    lease_id: Optional[str] = None,
+    if_tags: Optional[str] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=tags"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if timeout is not None:
+        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")
+    if snapshot is not None:
+        _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str")
+
+    # Construct headers
+    _headers["versionid"] = _SERIALIZER.header("version_id", version_id, "str")
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_blob_set_tags_request(
+    container_name: str,
+    blob: str,
+    *,
+    version_id: str,
+    version: str,
+    timeout: Optional[int] = None,
+    lease_id: Optional[str] = None,
+    transactional_content_md5: Optional[str] = None,
+    transactional_content_crc64: Optional[str] = None,
+    if_tags: Optional[str] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=tags"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if timeout is not None:
+        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    _headers["versionid"] = _SERIALIZER.header("version_id", version_id, "str")
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    if transactional_content_md5 is not None:
+        _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str")
+    if transactional_content_crc64 is not None:
+        _headers["x-ms-content-crc64"] = _SERIALIZER.header(
+            "transactional_content_crc64", transactional_content_crc64, "str"
+        )
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    if content_type is not None:
+        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_page_blob_create_request(
+    container_name: str,
+    blob: str,
+    *,
+    content_length: 
int, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?PageBlob" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + 
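# NOTE: for page blob creation, Content-Length above is the length of this
+    # request's body (there is none), while x-ms-blob-content-length, set a few
+    # lines below, is the total size of the page blob, which per the service
+    # docs must be a multiple of 512 bytes.
+        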
_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + if blob_sequence_number is not None: + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_upload_pages_request( + container_name: str, + blob: str, + *, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=page&update" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] 
= _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "str" + ) + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_clear_pages_request( + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=page&clear" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": 
_SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_upload_pages_from_url_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + content_length: int, + source_url: str, + source_range: str, + range: str, + version: str, + timeout: Optional[int] = None, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL 
+ _url = "/{containerName}/{blob}?comp=page&update&fromUrl" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-source-url"] = _SERIALIZER.header("source_url", source_url, "str") + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "str") + if source_content_crc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header("source_content_crc64", source_content_crc64, "str") + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" 
+ ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_get_page_ranges_request( + container_name: str, + blob: str, + *, + version: str, + snapshot: Optional[str] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=pagelist" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + + # Construct headers + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_get_page_ranges_diff_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + prevsnapshot: str, + prev_snapshot_url: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = 
None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=pagelist&diff" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + _params["prevsnapshot"] = _SERIALIZER.query("prevsnapshot", prevsnapshot, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int") + + # Construct headers + _headers["x-ms-previous-snapshot-url"] = _SERIALIZER.header("prev_snapshot_url", prev_snapshot_url, "str") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_resize_request( + container_name: str, + blob: str, + *, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=properties&Resize" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = 
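_url.format(**path_format_arguments)  # type: ignore
+    # NOTE: comp=properties&Resize only updates x-ms-blob-content-length;
+    # shrinking a page blob discards the pages beyond the new length (an
+    # assumption from the service documentation).
+    _url: str = 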
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_update_sequence_number_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + sequence_number_action: Union[str, _models.SequenceNumberActionType], + blob_sequence_number: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=properties&UpdateSequenceNumber" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", 
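if_unmodified_since, "rfc-1123")
+    # NOTE: sequence_number_action selects how blob_sequence_number is applied
+    # ("max", "update", or "increment" in SequenceNumberActionType); with
+    # "increment" the supplied number is ignored by the service (an assumption
+    # from the Set Blob Properties docs).
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", 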
if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-sequence-number-action"] = _SERIALIZER.header( + "sequence_number_action", sequence_number_action, "str" + ) + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_page_blob_copy_incremental_request( + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=incrementalcopy" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_append_blob_create_request( + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = 
None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?AppendBlob" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = 
_SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_append_blob_append_block_request( + container_name: str, + blob: str, + *, + content_length: int, + max_size: int, + append_position: int, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?AppendBlob/{containerName}/{blob}?comp=appendblock" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "str" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = 
+
+
+def build_append_blob_append_block_request(
+    container_name: str,
+    blob: str,
+    *,
+    content_length: int,
+    max_size: int,
+    append_position: int,
+    version: str,
+    timeout: Optional[int] = None,
+    transactional_content_md5: Optional[str] = None,
+    transactional_content_crc64: Optional[str] = None,
+    lease_id: Optional[str] = None,
+    encryption_key: Optional[str] = None,
+    encryption_key_sha256: Optional[str] = None,
+    encryption_algorithm: Optional[str] = None,
+    encryption_scope: Optional[str] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    if_tags: Optional[str] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=appendblock"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if timeout is not None:
+        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int")
+    if transactional_content_md5 is not None:
+        _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str")
+    if transactional_content_crc64 is not None:
+        _headers["x-ms-content-crc64"] = _SERIALIZER.header(
+            "transactional_content_crc64", transactional_content_crc64, "str"
+        )
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int")
+    _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int")
+    if encryption_key is not None:
+        _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str")
+    if encryption_key_sha256 is not None:
+        _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header(
+            "encryption_key_sha256", encryption_key_sha256, "str"
+        )
+    if encryption_algorithm is not None:
+        _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str")
+    if encryption_scope is not None:
+        _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_append_blob_append_block_from_url_request(  # pylint: disable=name-too-long
+    container_name: str,
+    blob: str,
+    *,
+    content_length: int,
+    source_url: str,
+    source_range: str,
+    max_size: int,
+    append_position: int,
+    version: str,
+    timeout: Optional[int] = None,
+    source_content_md5: Optional[str] = None,
+    source_content_crc64: Optional[str] = None,
+    lease_id: Optional[str] = None,
+    encryption_key: Optional[str] = None,
+    encryption_key_sha256: Optional[str] = None,
+    encryption_algorithm: Optional[str] = None,
+    encryption_scope: Optional[str] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    if_tags: Optional[str] = None,
+    copy_source_authorization: Optional[str] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=appendblock&fromUrl"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if timeout is not None:
+        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int")
+    _headers["x-ms-source-url"] = _SERIALIZER.header("source_url", source_url, "str")
+    _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str")
+    if source_content_md5 is not None:
+        _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "str")
+    if source_content_crc64 is not None:
+        _headers["x-ms-source-content-crc64"] = _SERIALIZER.header("source_content_crc64", source_content_crc64, "str")
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int")
+    _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int")
+    if encryption_key is not None:
+        _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str")
+    if encryption_key_sha256 is not None:
+        _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header(
+            "encryption_key_sha256", encryption_key_sha256, "str"
+        )
+    if encryption_algorithm is not None:
+        _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str")
+    if encryption_scope is not None:
+        _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    if copy_source_authorization is not None:
+        _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header(
+            "copy_source_authorization", copy_source_authorization, "str"
+        )
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_append_blob_seal_request(
+    container_name: str,
+    blob: str,
+    *,
+    append_position: int,
+    version: str,
+    timeout: Optional[int] = None,
+    lease_id: Optional[str] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    if_tags: Optional[str] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=seal"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if timeout is not None:
+        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        
_headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_block_blob_upload_request( + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?BlockBlob" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = 
_SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "str" + ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_block_blob_put_blob_from_url_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + content_length: int, + copy_source: str, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + 
encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?BlockBlob&fromUrl" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + 
_headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if copy_source_blob_properties is not None: + _headers["x-ms-copy-source-blob-properties"] = _SERIALIZER.header( + "copy_source_blob_properties", copy_source_blob_properties, "bool" + ) + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if copy_source_tags is not None: + _headers["x-ms-copy-source-tags"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_block_blob_stage_block_request( + container_name: str, + blob: str, + *, + block_id: str, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + timeout: Optional[int] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{containerName}/{blob}?comp=block" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = 
_SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "str" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_block_blob_stage_block_from_url_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + block_id: str, + content_length: int, + source_url: str, + source_range: str, + version: str, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?comp=block&fromURL/{containerName}/{blob}" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-source-url"] = _SERIALIZER.header("source_url", source_url, "str") + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "str") + if source_content_crc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header("source_content_crc64", source_content_crc64, "str") + if encryption_key is not None: + 
_headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_block_blob_commit_block_list_request( # pylint: disable=name-too-long + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/?comp=blocklist/{containerName}/{blob}" + path_format_arguments = { + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + "blob": _SERIALIZER.url("blob", blob, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, "str") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "str" + ) + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "str" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = 
prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_block_blob_get_block_list_request(
+    container_name: str,
+    blob: str,
+    *,
+    list_type: Union[str, _models.BlockListType],
+    version: str,
+    snapshot: Optional[str] = None,
+    lease_id: Optional[str] = None,
+    if_tags: Optional[str] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/{containerName}/{blob}?comp=blocklist"
+    path_format_arguments = {
+        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
+        "blob": _SERIALIZER.url("blob", blob, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if snapshot is not None:
+        _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str")
+    _params["blocklisttype"] = _SERIALIZER.query("list_type", list_type, "str")
+
+    # Construct headers
+    if lease_id is not None:
+        _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
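The request builders end here; the rest of the file defines the operation groups that use them. As the class docstring below warns, `ServiceOperations` is not meant to be constructed by hand. A sketch of the intended call path, assuming the generated `BlobClient` keeps an `(endpoint, credential)` constructor and that `azure-identity` is installed; endpoint and version values are placeholders:

```python
from azure.identity import DefaultAzureCredential

from azure.storage.blob import BlobClient

client = BlobClient(
    "https://myaccount.blob.core.windows.net",  # placeholder endpoint
    DefaultAzureCredential(),
)

# Operations hang off the client's `service` attribute; this generated
# surface requires the service version on every call.
props = client.service.get_properties(version="2025-01-05")
stats = client.service.get_statistics(version="2025-01-05")
```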
+
+
+class ServiceOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.storage.blob.BlobClient`'s
+        :attr:`service` attribute.
+    """
+
+    def __init__(self, *args, **kwargs):
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @overload
+    def set_properties(  # pylint: disable=inconsistent-return-statements
+        self,
+        body: _models.StorageServiceProperties,
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> None:
+        """Sets the properties of a storage account's Blob service, including properties for Storage
+        Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+        :param body: The storage service properties that specifies the analytics and CORS rules to set
+         on the Blob service. Required.
+        :type body: ~azure.storage.blob.models.StorageServiceProperties
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
+        :paramtype timeout: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def set_properties(  # pylint: disable=inconsistent-return-statements
+        self,
+        body: JSON,
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> None:
+        """Sets the properties of a storage account's Blob service, including properties for Storage
+        Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+        :param body: The storage service properties that specifies the analytics and CORS rules to set
+         on the Blob service. Required.
+        :type body: JSON
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
+        :paramtype timeout: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def set_properties(  # pylint: disable=inconsistent-return-statements
+        self,
+        body: IO[bytes],
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> None:
+        """Sets the properties of a storage account's Blob service, including properties for Storage
+        Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+        :param body: The storage service properties that specifies the analytics and CORS rules to set
+         on the Blob service. Required.
+        :type body: IO[bytes]
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
+        :paramtype timeout: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def set_properties(  # pylint: disable=inconsistent-return-statements
+        self,
+        body: Union[_models.StorageServiceProperties, JSON, IO[bytes]],
+        *,
+        version: str,
+        timeout: Optional[int] = None,
+        **kwargs: Any
+    ) -> None:
+        """Sets the properties of a storage account's Blob service, including properties for Storage
+        Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+        :param body: The storage service properties that specifies the analytics and CORS rules to set
+         on the Blob service. Is one of the following types: StorageServiceProperties, JSON, IO[bytes]
+         Required.
+        :type body: ~azure.storage.blob.models.StorageServiceProperties or JSON or IO[bytes]
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_service_set_properties_request( + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_properties( + self, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + """Retrieves properties of a storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: StorageServiceProperties. 
The StorageServiceProperties is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_service_get_properties_request( + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.StorageServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_statistics( + self, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + """Retrieves statistics related to replication for the Blob service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: StorageServiceStats. 
The StorageServiceStats is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_service_get_statistics_request( + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.StorageServiceStats, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_containers_segment( + self, + *, + version: str, + prefix: Optional[str] = None, + timeout: Optional[int] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + """The List Containers Segment operation returns a list of the containers under the specified + account. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :paramtype prefix: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. 
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :return: ListContainersSegmentResponse. The ListContainersSegmentResponse is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_service_list_containers_segment_request( + version=version, + prefix=prefix, + timeout=timeout, + marker=marker, + maxresults=maxresults, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListContainersSegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def get_user_delegation_key( + self, + body: _models.KeyInfo, + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: ~azure.storage.blob.models.KeyInfo + :keyword version: Specifies the version of the operation to use for this request. Required. 
+ :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_user_delegation_key( + self, + body: JSON, + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_user_delegation_key( + self, + body: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Required. + :type body: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_user_delegation_key( + self, + body: Union[_models.KeyInfo, JSON, IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.UserDelegationKey: + """The Get User Delegation Key operation gets the user delegation key for the Blob service. This + is only a valid operation when using User Delegation SAS. For more information, see Create + a user delegation SAS. + + :param body: The user delegation key info. Is one of the following types: KeyInfo, JSON, + IO[bytes] Required. 
+ :type body: ~azure.storage.blob.models.KeyInfo or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: UserDelegationKey. The UserDelegationKey is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_service_get_user_delegation_key_request( + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.UserDelegationKey, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword version: Specifies the version of the operation to use for this request. Required. 
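# --- Editor's sketch: the overloads above accept a KeyInfo model, a plain JSON
# dict, or raw bytes; a dict is the lightest. The "start"/"expiry" field names
# (ISO-8601 instants) are assumptions about the KeyInfo shape, and the call
# only succeeds when the client is authenticated with an AAD token credential.
key_info = {"start": "2025-01-01T00:00:00Z", "expiry": "2025-01-02T00:00:00Z"}
udk = client.service.get_user_delegation_key(key_info, version="2025-01-05")
print(udk["value"])  # signing key for user delegation SAS; field name assumed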
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_service_get_account_info_request( + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def submit_batch( # pylint: disable=inconsistent-return-statements + self, *, content_length: int, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> None: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
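# --- Editor's sketch: get_account_info returns None and reports everything in
# response headers, so pass the `cls` callback (which the generated method pops
# from kwargs, as shown above) to capture them.
def _keep_headers(pipeline_response, deserialized, headers):
    return headers

info = client.service.get_account_info(version="2025-01-05", cls=_keep_headers)
print(info["x-ms-sku-name"], info["x-ms-account-kind"], info["x-ms-is-hns-enabled"])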
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_service_submit_batch_request( + content_length=content_length, + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def filter_blobs( + self, + *, + version: str, + where: Optional[str] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None, + timeout: Optional[int] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + """The Filter Blobs operation enables callers to list blobs across all containers whose tags match + a given search expression. + + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :paramtype where: str + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or ~azure.storage.blob.models.FilterBlobsIncludes] + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None.
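# --- Editor's sketch: an account-wide tag query with the filter_blobs
# operation documented here. The `where` string follows the blob-tag filter
# syntax; the "blobs" key is an assumption about the FilterBlobSegment shape.
segment = client.service.filter_blobs(
    version="2025-01-05",
    where="\"project\" = 'alpha' AND \"stage\" = 'raw'",
    maxresults=500,
)
for blob in segment.get("blobs", []):
    print(blob["name"])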
+ :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :return: FilterBlobSegment. The FilterBlobSegment is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_service_filter_blobs_request( + version=version, + where=where, + include=include, + timeout=timeout, + marker=marker, + maxresults=maxresults, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.FilterBlobSegment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.BlobClient`'s + :attr:`container` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + access: Optional[Union[str, _models.PublicAccessType]] = None, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Creates a new container under the specified account. If the container with the same name + already exists, the operation fails. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword access: Specifies whether data in the container may be accessed publicly and the level + of access. Possible values include: 'container', 'blob'. Known values are: "blob" and + "container". Default value is None. + :paramtype access: str or ~azure.storage.blob.models.PublicAccessType + :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the + default encryption scope to set on the container and use for all future writes. Default value + is None. + :paramtype default_encryption_scope: str + :keyword deny_encryption_scope_override: Optional. Version 2019-07-07 and later. Specifies + that the request will fail if the target container does not have the same encryption scope as + the source container. Default value is None. + :paramtype deny_encryption_scope_override: bool + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_create_request( + container_name=container_name, + version=version, + access=access, + default_encryption_scope=default_encryption_scope, + deny_encryption_scope_override=deny_encryption_scope_override, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. 
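# --- Editor's sketch: create-if-not-exists built on the 409 mapping above,
# where an existing container surfaces as ResourceExistsError. The
# ContainerOperations group is reached through the client's `container`
# attribute, per its class docstring.
from azure.core.exceptions import ResourceExistsError

try:
    client.container.create("my-container", version="2025-01-05", access="blob")
except ResourceExistsError:
    pass  # container already exists; treat as success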
+ :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_get_properties_request( + container_name=container_name, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: 
Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """operation marks the specified container for deletion. The container and any blobs contained + within it are later deleted during garbage collection. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_delete_request( + container_name=container_name, + version=version, + timeout=timeout, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + 
lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """operation sets one or more user-defined name-value pairs for the specified container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_set_metadata_request( + container_name=container_name, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_access_policy( + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> List[_models.SignedIdentifier]: + """gets the permissions for the specified container. The permissions indicate whether container + data may be accessed publicly. + + :param container_name: The name of the container. Required. 
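# --- Editor's sketch: the set_metadata signature above carries no metadata
# parameter, so the name-value pairs presumably travel as x-ms-meta-* headers
# through the `headers` kwarg the method already pops; that routing is an
# assumption about this generated layer. The delete call shows the
# if_unmodified_since precondition from the preceding operation.
import datetime

client.container.set_metadata(
    "my-container",
    version="2025-01-05",
    headers={"x-ms-meta-team": "data-eng", "x-ms-meta-env": "prod"},
)
cutoff = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)
client.container.delete("my-container", version="2025-01-05", if_unmodified_since=cutoff)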
+ :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :return: list of SignedIdentifier + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + + _request = build_container_get_access_policy_request( + container_name=container_name, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.SignedIdentifier], response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: List[_models.SignedIdentifier], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = 
None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Required. + :type acl: list[~azure.storage.blob.models.SignedIdentifier] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Required. + :type acl: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + acl: Union[List[_models.SignedIdentifier], IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param container_name: The name of the container. Required. + :type container_name: str + :param acl: The access control list for the container. Is either a [SignedIdentifier] type or a + IO[bytes] type. Required. + :type acl: list[~azure.storage.blob.models.SignedIdentifier] or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. 
+ :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(acl, (IOBase, bytes)): + _content = acl + else: + _content = json.dumps(acl, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_container_set_access_policy_request( + container_name=container_name, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + lease_id=lease_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def undelete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Restores a previously-deleted container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name + of the deleted container to restore. Default value is None. + :paramtype deleted_container_name: str + :keyword deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. Default value is None. 
+ :paramtype deleted_container_version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_undelete_request( + container_name=container_name, + version=version, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def rename( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + source_container_name: str, + version: str, + source_lease_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Renames an existing container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword source_container_name: Specifies the name of the container to rename. + Required. + :paramtype source_container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword source_lease_id: A lease ID for the source path. If specified, the source path must + have an active lease and the lease ID must match. Default value is None. + :paramtype source_lease_id: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None.
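# --- Editor's sketch: restoring a soft-deleted container with the undelete
# operation above. The deleted name/version pair normally comes from a listing
# that includes deleted containers; both values below are placeholders.
client.container.undelete(
    "restored-container",
    version="2025-01-05",
    deleted_container_name="my-container",
    deleted_container_version="01D60F8BB59A4652",
)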
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_rename_request( + container_name=container_name, + source_container_name=source_container_name, + version=version, + source_lease_id=source_lease_id, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def submit_batch( + self, + container_name: str, + body: bytes, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> bytes: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param container_name: The name of the container. Required. + :type container_name: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
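# --- Editor's sketch: renaming a container. Reading the docstring above,
# `container_name` is taken as the destination and `source_container_name` as
# the existing container; that split is an assumption. A leased source also
# needs its lease ID passed as source_lease_id.
client.container.rename(
    "new-name",
    source_container_name="old-name",
    version="2025-01-05",
)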
+ :paramtype timeout: int + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[bytes] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_container_submit_batch_request( + container_name=container_name, + content_length=content_length, + version=version, + timeout=timeout, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def filter_blobs( + self, + container_name: str, + *, + version: str, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + where: Optional[str] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. 
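# --- Editor's sketch: the batch body is the raw multipart/mixed payload as
# bytes; as coded above it is JSON-encoded with format="base64" on the way out
# and decoded the same way on the way back (unless stream=True). The payload
# below is schematic, not a well-formed batch.
body = b"--batch_boundary\r\n...sub-requests...\r\n--batch_boundary--\r\n"
reply = client.container.submit_batch(
    "my-container",
    body,
    content_length=len(body),
    version="2025-01-05",
)
print(len(reply), "bytes of multipart response")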
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :keyword where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :paramtype where: str + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or ~azure.storage.blob.models.FilterBlobsIncludes] + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: FilterBlobSegment. The FilterBlobSegment is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_container_filter_blobs_request( + container_name=container_name, + version=version, + marker=marker, + maxresults=maxresults, + where=where, + include=include, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.FilterBlobSegment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def acquire_lease( #
pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Required. + :paramtype duration: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword proposed_lease_id: Optional. The proposed lease ID for the container. Default value + is None. + :paramtype proposed_lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_acquire_lease_request( + container_name=container_name, + duration=duration, + version=version, + timeout=timeout, + proposed_lease_id=proposed_lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def release_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ *,
+ lease_id: str,
+ version: str,
+ timeout: Optional[int] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """[Update] establishes and manages a lock on a container for delete operations. The lock duration
+ can be 15 to 60 seconds, or can be infinite.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword lease_id: A lease ID for the container. The container must have an active lease and
+ the lease ID must match. Required.
+ :paramtype lease_id: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
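Since acquire_lease returns None and surfaces the new lease ID only in the x-ms-lease-id response header, a caller can capture it through the cls hook the generated code honors. A sketch, reusing the hypothetical client from the earlier example:

    # Acquire a 30-second lease; the lease ID comes back only as a header.
    captured = {}
    client.container.acquire_lease(
        container_name="my-container",
        duration=30,  # 15 to 60 seconds, or -1 for an infinite lease
        version="2025-01-05",  # assumed service version string
        cls=lambda resp, body, headers: captured.update(headers),
    )
    lease_id = captured["x-ms-lease-id"]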
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_container_release_lease_request(
+ container_name=container_name,
+ lease_id=lease_id,
+ version=version,
+ timeout=timeout,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def renew_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ *,
+ lease_id: str,
+ version: str,
+ timeout: Optional[int] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """[Update] establishes and manages a lock on a container for delete operations. The lock duration
+ can be 15 to 60 seconds, or can be infinite.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword lease_id: A lease ID for the container. The container must have an active lease and
+ the lease ID must match. Required.
+ :paramtype lease_id: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
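Renewing and then releasing that lease is symmetric; both calls below reuse the hypothetical client and lease_id from the previous sketch:

    # Keep the lease alive while work is in progress, then give it up.
    client.container.renew_lease(
        container_name="my-container",
        lease_id=lease_id,
        version="2025-01-05",  # assumed service version string
    )
    client.container.release_lease(
        container_name="my-container",
        lease_id=lease_id,
        version="2025-01-05",
    )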
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_renew_lease_request( + container_name=container_name, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + *, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + break_period: Optional[int] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. 
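break_lease needs no lease ID, which makes it the escape hatch when the lease holder is gone. A sketch under the same assumptions as the earlier examples:

    # Break the container's lease without knowing its ID, allowing the
    # current holder at most 10 more seconds before the lease frees up.
    client.container.break_lease(
        container_name="my-container",
        version="2025-01-05",  # assumed service version string
        break_period=10,
    )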
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword break_period: For a break operation, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. This break period is only used if it is + shorter than the time remaining on the lease. If longer, the time remaining on the lease is + used. A new lease will not be available before the break period has expired, but the lease may + be held for longer than the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease period elapses, and an + infinite lease breaks immediately. Default value is None. + :paramtype break_period: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_break_lease_request( + container_name=container_name, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + break_period=break_period, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements 
+ self,
+ container_name: str,
+ *,
+ lease_id: str,
+ proposed_lease_id: str,
+ version: str,
+ timeout: Optional[int] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """[Update] establishes and manages a lock on a container for delete operations. The lock duration
+ can be 15 to 60 seconds, or can be infinite.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword lease_id: A lease ID for the container. The container must have an active lease and
+ the lease ID must match. Required.
+ :paramtype lease_id: str
+ :keyword proposed_lease_id: The proposed lease ID for the container. Required.
+ :paramtype proposed_lease_id: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_container_change_lease_request(
+ container_name=container_name,
+ lease_id=lease_id,
+ proposed_lease_id=proposed_lease_id,
+ version=version,
+ timeout=timeout,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def list_blob_flat_segment(
+ self,
+ container_name: str,
+ *,
+ version: str,
+ prefix: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.ListBlobsFlatSegmentResponse:
+ """[Update] The List Blobs operation returns a list of the blobs under the specified container.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword prefix: Filters the results to return only blobs whose name begins with the
+ specified prefix. Default value is None.
+ :paramtype prefix: str
+ :keyword marker: A string value that identifies the portion of the list of blobs to be
+ returned with the next listing operation. The operation returns the NextMarker value within the
+ response body if the listing operation did not return all blobs remaining to be listed
+ with the current page. The NextMarker value can be used as the value for the marker parameter
+ in a subsequent call to request the next page of list items. The marker value is opaque to the
+ client. Default value is None.
+ :paramtype marker: str
+ :keyword maxresults: Specifies the maximum number of blobs to return. If the request does
+ not specify maxresults, or specifies a value greater than 5000, the server will return up to
+ 5000 items. Default value is None.
+ :paramtype maxresults: int
+ :keyword include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :paramtype include: list[str or ~azure.storage.blob.models.ListBlobsIncludes]
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :return: ListBlobsFlatSegmentResponse.
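For the change_lease operation above, the caller supplies both the current and the proposed lease ID; a sketch under the same assumptions as the earlier examples:

    import uuid

    # Rotate the active lease to a new caller-chosen GUID.
    new_lease_id = str(uuid.uuid4())
    client.container.change_lease(
        container_name="my-container",
        lease_id=lease_id,  # the currently active lease ID
        proposed_lease_id=new_lease_id,
        version="2025-01-05",  # assumed service version string
    )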
The ListBlobsFlatSegmentResponse is compatible with + MutableMapping + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) + + _request = build_container_list_blob_flat_segment_request( + container_name=container_name, + version=version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListBlobsFlatSegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_blob_hierarchy_segment( + self, + container_name: str, + *, + delimiter: str, + version: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludes]]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword delimiter: When the request includes this parameter, the operation returns a + BlobPrefix element in the response body that acts as a placeholder for all blobs whose names + begin with the same substring up to the appearance of the delimiter character. The delimiter + may be a single character or a string. Required. + :paramtype delimiter: str + :keyword version: Specifies the version of the operation to use for this request. Required. 
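The marker/NextMarker contract described in these docstrings implies a paging loop like the one below for list_blob_flat_segment. The response field names (segment.blob_items, next_marker, name) are assumptions based on typical generated models, not confirmed by this change:

    # Walk every page of the flat listing by chaining NextMarker values.
    marker = None
    while True:
        page = client.container.list_blob_flat_segment(
            container_name="my-container",
            version="2025-01-05",  # assumed service version string
            marker=marker,
            maxresults=1000,
        )
        for item in page.segment.blob_items:  # field names assumed
            print(item.name)
        marker = page.next_marker
        if not marker:
            break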
+ :paramtype version: str
+ :keyword prefix: Filters the results to return only blobs whose name begins with the
+ specified prefix. Default value is None.
+ :paramtype prefix: str
+ :keyword marker: A string value that identifies the portion of the list of blobs to be
+ returned with the next listing operation. The operation returns the NextMarker value within the
+ response body if the listing operation did not return all blobs remaining to be listed
+ with the current page. The NextMarker value can be used as the value for the marker parameter
+ in a subsequent call to request the next page of list items. The marker value is opaque to the
+ client. Default value is None.
+ :paramtype marker: str
+ :keyword maxresults: Specifies the maximum number of blobs to return. If the request does
+ not specify maxresults, or specifies a value greater than 5000, the server will return up to
+ 5000 items. Default value is None.
+ :paramtype maxresults: int
+ :keyword include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :paramtype include: list[str or ~azure.storage.blob.models.ListBlobsIncludes]
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :return: ListBlobsHierarchySegmentResponse. The ListBlobsHierarchySegmentResponse is compatible
+ with MutableMapping
+ :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None)
+
+ _request = build_container_list_blob_hierarchy_segment_request(
+ container_name=container_name,
+ delimiter=delimiter,
+ version=version,
+ prefix=prefix,
+ marker=marker,
+ maxresults=maxresults,
+ include=include,
+ timeout=timeout,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = kwargs.pop("stream", False)
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+
response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.ListBlobsHierarchySegmentResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, container_name: str, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :param container_name: The name of the container. Required. + :type container_name: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_container_get_account_info_request( + container_name=container_name, + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.BlobClient`'s + :attr:`blob` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def download( + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_content_md5: Optional[bool] = None, + range_content_crc64: Optional[bool] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword range_content_md5: Optional. When this header is set to true and specified together + with the Range header, the service returns the MD5 hash for the range, as long as the range is + less than or equal to 4 MB in size. Default value is None. + :paramtype range_content_md5: bool + :keyword range_content_crc64: Optional. When this header is set to true and specified together + with the Range header, the service returns the CRC64 hash for the range, as long as the range + is less than or equal to 4 MB in size. Default value is None. + :paramtype range_content_crc64: bool + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. 
Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[bytes] = kwargs.pop("cls", None) + + _request = build_blob_download_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=lease_id, + range_content_md5=range_content_md5, + range_content_crc64=range_content_crc64, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_tags=if_tags, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", 
response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_properties( + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. 
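As the branch on _stream in download above shows, passing stream=True makes the call return response.iter_bytes() instead of a decoded body. A ranged-read sketch; the version_id value is a placeholder required by this generated signature:

    # Stream only the first kilobyte of the blob instead of buffering it all.
    version_id = "2024-01-01T00:00:00.0000000Z"  # placeholder version id
    body = client.blob.download(
        container_name="my-container",
        blob="report.csv",
        version_id=version_id,
        version="2025-01-05",  # assumed service version string
        range="bytes=0-1023",
        stream=True,  # the call then yields chunks rather than bytes
    )
    data = bytearray()
    for chunk in body:
        data.extend(chunk)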
This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_get_properties_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + snapshot=snapshot, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = 
self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + 
response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_delete_type: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + permanently removed from the storage account. If the storage account's soft delete feature is + enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + immediately. However, the blob service retains the blob or snapshot for the number of days + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the \\"include=deleted\\" query parameter to discover which blobs and snapshots have + been soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. 
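get_properties is generated in the head-as-boolean style: on a 2xx it falls through to the final return 200 <= response.status_code <= 299 above, and on anything else it raises HttpResponseError, so it can double as a cheap probe. A sketch under the same assumptions as the earlier examples:

    # Returns True when the HEAD succeeds; raises HttpResponseError otherwise.
    exists = client.blob.get_properties(
        container_name="my-container",
        blob="report.csv",
        version_id="2024-01-01T00:00:00.0000000Z",  # placeholder; required keyword
        version="2025-01-05",  # assumed service version string
    )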
All
+ other operations on a soft-deleted blob or snapshot cause the service to return an HTTP status
+ code of 404 (ResourceNotFound).
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Required.
+ :paramtype version_id: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
+ see :code:`Creating
+ a Snapshot of a Blob.`. Default value is None.
+ :paramtype snapshot: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword delete_snapshots: Required if the blob has associated snapshots. Specify one of the
+ following two options: "include" deletes the base blob and all of its snapshots; "only" deletes
+ only the blob's snapshots and not the blob itself. Known values are: "none" and "include".
+ Default value is None.
+ :paramtype delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
+ :paramtype if_tags: str
+ :keyword blob_delete_type: Optional. Only possible value is 'permanent', which specifies to
+ permanently delete a blob if blob soft delete is enabled. Known values are: "none" and
+ "include". Default value is None.
+ :paramtype blob_delete_type: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType
+ :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+ None.
+ :paramtype etag: str
+ :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ if match_condition == MatchConditions.IfNotModified:
+ error_map[412] = ResourceModifiedError
+ elif match_condition == MatchConditions.IfPresent:
+ error_map[412] = ResourceNotFoundError
+ elif match_condition == MatchConditions.IfMissing:
+ error_map[412] = ResourceExistsError
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_blob_delete_request(
+ container_name=container_name,
+ blob=blob,
+ version_id=version_id,
+ version=version,
+ snapshot=snapshot,
+ timeout=timeout,
+ lease_id=lease_id,
+ delete_snapshots=delete_snapshots,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since,
+ if_tags=if_tags,
+ blob_delete_type=blob_delete_type,
+ etag=etag,
+ match_condition=match_condition,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def undelete( # pylint: disable=inconsistent-return-statements
+ self, container_name: str, blob: str, *, version: str, **kwargs: Any
+ ) -> None:
+ """Undelete a blob that was previously soft deleted.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
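A usage sketch for the `delete` operation completed above (the `undelete` docstring resumes below). `blob_ops` is again a hypothetical handle to this operations class, and the literal `version`/`version_id` values are only illustrative:

# Deletes the base blob together with its snapshots; the service answers 202,
# and any other status raises through the error_map built in the method body.
blob_ops.delete(
    container_name="my-container",
    blob="my-blob",
    version_id="2024-05-04T12:00:00.0000000Z",  # required keyword in this signature
    version="2025-01-05",
    delete_snapshots="include",
)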
+ :paramtype version: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_blob_undelete_request(
+ container_name=container_name,
+ blob=blob,
+ version=version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def set_expiry( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ blob: str,
+ *,
+ expiry_options: Union[str, _models.BlobExpiryOptions],
+ expires_on: str,
+ version: str,
+ **kwargs: Any
+ ) -> None:
+ """Set the expiration time of a blob.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword expiry_options: Indicates mode of the expiry time. Known values are:
+ "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required.
+ :paramtype expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions
+ :keyword expires_on: The time to set the blob to expire. Required.
+ :paramtype expires_on: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
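A sketch of the `set_expiry` call whose docstring begins above. The signature only types `expires_on` as a string; the milliseconds value below assumes the service's relative-expiry convention, which is not stated in this diff:

blob_ops.set_expiry(  # blob_ops: hypothetical operations-class instance
    container_name="my-container",
    blob="upload.tmp",
    expiry_options="RelativeToNow",
    expires_on="86400000",  # assumption: milliseconds from now for RelativeToNow
    version="2025-01-05",
)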
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_expiry_request( + container_name=container_name, + blob=blob, + expiry_options=expiry_options, + expires_on=expires_on, + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_http_headers( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Set HTTP Headers operation sets system properties on the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. 
If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
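A sketch of `set_http_headers` under the same assumptions (`blob_ops` hypothetical; the docstring continues below). Content headers not being set are simply omitted, since each defaults to None:

blob_ops.set_http_headers(
    container_name="my-container",
    blob="site/index.html",
    version="2025-01-05",
    blob_content_type="text/html",
    blob_cache_control="max-age=3600",
    blob_content_disposition='inline; filename="index.html"',
)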
+ :paramtype match_condition: ~azure.core.MatchConditions
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ if match_condition == MatchConditions.IfNotModified:
+ error_map[412] = ResourceModifiedError
+ elif match_condition == MatchConditions.IfPresent:
+ error_map[412] = ResourceNotFoundError
+ elif match_condition == MatchConditions.IfMissing:
+ error_map[412] = ResourceExistsError
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_blob_set_http_headers_request(
+ container_name=container_name,
+ blob=blob,
+ version=version,
+ timeout=timeout,
+ blob_cache_control=blob_cache_control,
+ blob_content_type=blob_content_type,
+ blob_content_md5=blob_content_md5,
+ blob_content_encoding=blob_content_encoding,
+ blob_content_language=blob_content_language,
+ lease_id=lease_id,
+ blob_content_disposition=blob_content_disposition,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since,
+ if_tags=if_tags,
+ etag=etag,
+ match_condition=match_condition,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+ "int", response.headers.get("x-ms-blob-sequence-number")
+ )
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def set_immutability_policy( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ blob: str,
+ *,
+ version: str,
+ timeout: Optional[int] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ immutability_policy_expiry: Optional[str] = None,
+ immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ **kwargs: Any
+ ) -> None:
+ """Set the immutability policy of a blob.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword immutability_policy_expiry: Specifies the date time when the blob's immutability policy
+ is set to expire. Default value is None.
+ :paramtype immutability_policy_expiry: str
+ :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+ Known values are: "Mutable", "Locked", and "Unlocked". Default value is None.
+ :paramtype immutability_policy_mode: str or
+ ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_blob_set_immutability_policy_request(
+ container_name=container_name,
+ blob=blob,
+ version=version,
+ timeout=timeout,
+ if_unmodified_since=if_unmodified_since,
+ immutability_policy_expiry=immutability_policy_expiry,
+ immutability_policy_mode=immutability_policy_mode,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-immutability-policy-until-date"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date")
+ )
+ response_headers["x-ms-immutability-policy-mode"] = self._deserialize(
+ "str", response.headers.get("x-ms-immutability-policy-mode")
+ )
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str",
response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete_immutability_policy( # pylint: disable=inconsistent-return-statements + self, container_name: str, blob: str, *, version: str, timeout: Optional[int] = None, **kwargs: Any + ) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_delete_immutability_policy_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_legal_hold( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + legal_hold: bool, + version: str, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """The Set Legal Hold operation sets a legal hold on the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword legal_hold: Required. Specifies the legal hold status to set on the blob. Required. + :paramtype legal_hold: bool + :keyword version: Specifies the version of the operation to use for this request. Required. 
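A sketch of toggling `set_legal_hold`, whose docstring continues below (`blob_ops` hypothetical). The service echoes the resulting state in the `x-ms-legal-hold` response header, deserialized as a bool in the method body:

# Place a legal hold on the blob.
blob_ops.set_legal_hold(
    container_name="compliance",
    blob="audit-2024.log",
    legal_hold=True,
    version="2025-01-05",
)
# Clear it again once the hold is no longer required.
blob_ops.set_legal_hold(
    container_name="compliance",
    blob="audit-2024.log",
    legal_hold=False,
    version="2025-01-05",
)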
+ :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_legal_hold_request( + container_name=container_name, + blob=blob, + legal_hold=legal_hold, + version=version, + timeout=timeout, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Set Metadata operation sets user-defined metadata for the specified blob as one or more + name-value pairs. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
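A conditional `set_metadata` sketch. Note the generated signature carries no metadata parameter; the name-value pairs are assumed here to travel as standard `x-ms-meta-*` headers through the `headers` passthrough the method pops from `kwargs` — an assumption, since the header plumbing lives outside this hunk:

from azure.core import MatchConditions

blob_ops.set_metadata(  # blob_ops: hypothetical operations-class instance
    container_name="my-container",
    blob="my-blob",
    version="2025-01-05",
    etag='"0x8DCA1B2C3D4E5F6"',  # hypothetical ETag from an earlier response
    match_condition=MatchConditions.IfNotModified,  # a 412 then maps to ResourceModifiedError
    headers={"x-ms-meta-project": "atlas"},  # assumption: metadata as x-ms-meta-* headers
)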
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_metadata_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + duration: int, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = 
None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Required. + :paramtype duration: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword proposed_lease_id: Optional. The proposed lease ID for the container. Default value + is None. + :paramtype proposed_lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_acquire_lease_request( + container_name=container_name, + blob=blob, + duration=duration, + version=version, + timeout=timeout, + proposed_lease_id=proposed_lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def release_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword lease_id: Required. A lease ID for the source path. 
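`acquire_lease` returns None unless a `cls` callback is supplied; the new lease ID surfaces only in the `x-ms-lease-id` response header deserialized above. A capture sketch, with `blob_ops` again a hypothetical instance of this operations class:

def grab_lease_id(pipeline_response, deserialized, response_headers):
    # Invoked by the generated operation as cls(pipeline_response, None, response_headers).
    return response_headers["x-ms-lease-id"]

lease_id = blob_ops.acquire_lease(
    container_name="my-container",
    blob="my-blob",
    duration=-1,  # -1 requests a lease that never expires
    version="2025-01-05",
    cls=grab_lease_id,
)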
If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_release_lease_request( + container_name=container_name, + blob=blob, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword lease_id: Required. A lease ID for the source path. If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_renew_lease_request( + container_name=container_name, + blob=blob, + lease_id=lease_id, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + lease_id: str, + version: str, + timeout: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword lease_id: Required. A lease ID for the source path. 
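Continuing the lease lifecycle with the ID captured earlier (same `blob_ops` assumption; the `change_lease` docstring resumes below):

blob_ops.renew_lease(
    container_name="my-container", blob="my-blob",
    lease_id=lease_id, version="2025-01-05",
)
blob_ops.release_lease(
    container_name="my-container", blob="my-blob",
    lease_id=lease_id, version="2025-01-05",
)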
If specified, the source path + must have an active lease and the lease ID must match. Required. + :paramtype lease_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword proposed_lease_id: Optional. The proposed lease ID for the container. Default value + is None. + :paramtype proposed_lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_change_lease_request( + container_name=container_name, + blob=blob, + lease_id=lease_id, + version=version, + timeout=timeout, + proposed_lease_id=proposed_lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword break_period: For a break operation, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. This break period is only used if it is + shorter than the time remaining on the lease. If longer, the time remaining on the lease is + used. A new lease will not be available before the break period has expired, but the lease may + be held for longer than the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease period elapses, and an + infinite lease breaks immediately. Default value is None. + :paramtype break_period: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
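A `break_lease` sketch; per the docstring above, `break_period` caps how long the current holder keeps the lease, and the 202 response reports the remaining seconds in `x-ms-lease-time`:

blob_ops.break_lease(  # blob_ops: hypothetical operations-class instance
    container_name="my-container",
    blob="my-blob",
    version="2025-01-05",
    break_period=10,  # break within at most 10 seconds
)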
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_break_lease_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + break_period=break_period, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def create_snapshot( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param container_name: The name of the container. Required. 
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key
+ to use to encrypt the data provided in the request. If not specified, the request will be
+ encrypted with the root account key. Default value is None.
+ :paramtype encryption_key: str
+ :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256
+ hash of the encryption key used to encrypt the data provided in the request. This header is
+ only used for encryption with a customer-provided key. If the request is authenticated with a
+ client token, this header should be specified using the SHA256 hash of the encryption key.
+ Default value is None.
+ :paramtype encryption_key_sha256: str
+ :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the
+ algorithm to use for encryption. If not specified, the default is AES256. Default value is
+ None.
+ :paramtype encryption_algorithm: str
+ :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+ scope to use to encrypt the data provided in the request. If not specified, encryption is
+ performed with the default account encryption scope. Default value is None.
+ :paramtype encryption_scope: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
+ :paramtype if_tags: str
+ :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+ None.
+ :paramtype etag: str
+ :keyword match_condition: The match condition to use upon the etag. Default value is None.
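
A sketch of create_snapshot with a customer-provided key. It assumes the CPK headers carry base64-encoded values, as in the storage customer-provided-key convention; `client` comes from the first sketch:

import base64
import hashlib
import os

key = os.urandom(32)  # a 256-bit customer-provided key
client.create_snapshot(
    container_name="my-container",
    blob="my-blob.txt",
    version="2025-01-05",
    encryption_key=base64.b64encode(key).decode(),
    encryption_key_sha256=base64.b64encode(hashlib.sha256(key).digest()).decode(),
    encryption_algorithm="AES256",
)
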
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_create_snapshot_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def start_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: 
Optional[str] = None,
+ source_if_tags: Optional[str] = None,
+ if_modified_since: Optional[datetime.datetime] = None,
+ if_unmodified_since: Optional[datetime.datetime] = None,
+ if_tags: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ blob_tags_string: Optional[str] = None,
+ seal_blob: Optional[bool] = None,
+ immutability_policy_expiry: Optional[str] = None,
+ immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ legal_hold: Optional[bool] = None,
+ etag: Optional[str] = None,
+ match_condition: Optional[MatchConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ """The Start Copy From URL operation copies a blob or an internet resource to a new blob.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL
+ of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as
+ it would appear in a request URI. The source blob must either be public or must be
+ authenticated via a shared access signature. Required.
+ :paramtype copy_source: str
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+ "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+ value is None.
+ :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword rehydrate_priority: Optional: Indicates the priority with which to rehydrate an
+ archived blob. Known values are: "High" and "Standard". Default value is None.
+ :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+ has not been modified since the specified date/time. Default value is None.
+ :paramtype source_if_unmodified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with
+ a matching value. Default value is None.
+ :paramtype source_if_tags: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. Default value is None. + :paramtype seal_blob: bool + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_start_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + source_if_tags=source_if_tags, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL + of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as + it would appear in a request URI. The source blob must either be public or must be + authenticated via a shared access signature. Required. + :paramtype copy_source: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
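
start_copy_from_url above only schedules the copy; the copy identifier and status travel back as response headers, which the `cls` hook can surface (the generated method invokes cls(pipeline_response, None, response_headers)). A sketch pairing it with abort_copy_from_url, which appears later in this diff; `client` comes from the first sketch:

def keep_headers(pipeline_response, deserialized, headers):
    # Return the deserialized response headers to the caller.
    return headers

headers = client.start_copy_from_url(
    container_name="my-container",
    blob="copied-blob.txt",
    copy_source="https://source.blob.core.windows.net/src/source.txt?sv=...",  # SAS elided
    version="2025-01-05",
    cls=keep_headers,
)

if headers["x-ms-copy-status"] == "pending":
    # Cancel the scheduled copy; the destination is left with zero length and full metadata.
    client.abort_copy_from_url(
        container_name="my-container",
        blob="copied-blob.txt",
        copy_id=headers["x-ms-copy-id"],
        version="2025-01-05",
    )
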
+ :paramtype timeout: int
+ :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+ "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+ value is None.
+ :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+ has not been modified since the specified date/time. Default value is None.
+ :paramtype source_if_unmodified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
+ :paramtype if_tags: str
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :paramtype source_content_md5: str
+ :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+ value is None.
+ :paramtype blob_tags_string: str
+ :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+ is set to expire. Default value is None.
+ :paramtype immutability_policy_expiry: str
+ :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+ Known values are: "Mutable", "Locked", and "Unlocked". Default value is None.
+ :paramtype immutability_policy_mode: str or
+ ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is
+ None.
+ :paramtype legal_hold: bool
+ :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+ valid OAuth access token to copy source. Default value is None.
+ :paramtype copy_source_authorization: str
+ :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+ scope to use to encrypt the data provided in the request. If not specified, encryption is
+ performed with the default account encryption scope. Default value is None.
+ :paramtype encryption_scope: str
+ :keyword copy_source_tags: Optional, default 'replace'. Indicates if source tags should be
+ copied or replaced with the tags specified by x-ms-tags. Default value is None.
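
Unlike start_copy_from_url, copy_from_url does not return until the copy completes, so no status polling is needed. A sketch that uses copy_source_authorization to read a private source; the token scope shown is an assumption, and `credential`/`client` come from the first sketch:

# Acquire a bearer token for the source account (scope is an assumed value).
token = credential.get_token("https://storage.azure.com/.default").token
client.copy_from_url(
    container_name="my-container",
    blob="copied-blob.txt",
    copy_source="https://source.blob.core.windows.net/src/source.txt",
    version="2025-01-05",
    copy_source_authorization=f"Bearer {token}",
)
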
+ :paramtype copy_source_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + tier=tier, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=encryption_scope, + copy_source_tags=copy_source_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def abort_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. Required. + :paramtype copy_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_abort_copy_from_url_request( + container_name=container_name, + blob=blob, + copy_id=copy_id, + version=version, + timeout=timeout, + lease_id=lease_id, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, 
response_headers) # type: ignore
+
+ @distributed_trace
+ def set_tier( # pylint: disable=inconsistent-return-statements
+ self,
+ container_name: str,
+ blob: str,
+ *,
+ access_tier: Union[str, _models.AccessTier],
+ version: str,
+ timeout: Optional[int] = None,
+ rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None,
+ lease_id: Optional[str] = None,
+ if_tags: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob
+ or block blob, but not on an append blob. A block blob's tier determines Hot/Cool/Archive
+ storage type. This operation does not update the blob's ETag.
+
+ :param container_name: The name of the container. Required.
+ :type container_name: str
+ :param blob: The name of the blob. Required.
+ :type blob: str
+ :keyword access_tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6",
+ "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive".
+ Required.
+ :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword version: Specifies the version of the operation to use for this request. Required.
+ :paramtype version: str
+ :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`. Default value is None.
+ :paramtype timeout: int
+ :keyword rehydrate_priority: Optional: Indicates the priority with which to rehydrate an
+ archived blob. Known values are: "High" and "Standard". Default value is None.
+ :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
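
A sketch of the archive/rehydrate round trip set_tier supports; the tier names come from the known values listed above, and `client` comes from the first sketch:

# Move the blob to the Archive tier...
client.set_tier(
    container_name="my-container",
    blob="my-blob.txt",
    access_tier="Archive",
    version="2025-01-05",
)

# ...and later request a high-priority rehydration back to Hot.
client.set_tier(
    container_name="my-container",
    blob="my-blob.txt",
    access_tier="Hot",
    rehydrate_priority="High",
    version="2025-01-05",
)
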
+ :paramtype if_tags: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_set_tier_request( + container_name=container_name, + blob=blob, + access_tier=access_tier, + version=version, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, container_name: str, blob: str, *, version: str, **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. 
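
get_account_info is typed to return None; the SKU name and account kind ride on response headers, so the `cls` hook is the way to read them, as in the earlier start_copy sketch:

sku_and_kind = client.get_account_info(
    container_name="my-container",
    blob="my-blob.txt",
    version="2025-01-05",
    # cls receives (pipeline_response, deserialized, response_headers).
    cls=lambda resp, body, headers: (headers["x-ms-sku-name"], headers["x-ms-account-kind"]),
)
sku_name, account_kind = sku_and_kind
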
+ :paramtype version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_get_account_info_request( + container_name=container_name, + blob=blob, + version=version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @overload + def query( + self, + container_name: str, + blob: str, + query_request: _models.QueryRequest, + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. + :type query_request: ~azure.storage.blob.models.QueryRequest + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. 
+ :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def query( + self, + container_name: str, + blob: str, + query_request: JSON, + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. 
+ :type query_request: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def query( + self, + container_name: str, + blob: str, + query_request: IO[bytes], + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Required. + :type query_request: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
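
The three overloads above accept a typed QueryRequest model, a JSON-compatible dict, or a raw byte stream. The dict keys below are assumptions about the wire shape of QueryRequest, not confirmed by this diff; `client` comes from the first sketch:

import io
import json

filter_expr = {"queryType": "SQL", "expression": "SELECT * from BlobStorage"}  # assumed field names

# Dict form: serialized on our behalf with SdkJSONEncoder (see the implementation below).
result: bytes = client.query("my-container", "data.csv", filter_expr, version="2025-01-05")

# IO[bytes] form: a pre-serialized body is passed through as-is.
body = io.BytesIO(json.dumps(filter_expr).encode())
result = client.query("my-container", "data.csv", body, version="2025-01-05",
                      content_type="application/json")
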
+ :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query( + self, + container_name: str, + blob: str, + query_request: Union[_models.QueryRequest, JSON, IO[bytes]], + *, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bytes: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param query_request: The query request. Is one of the following types: QueryRequest, JSON, + IO[bytes] Required. + :type query_request: ~azure.storage.blob.models.QueryRequest or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bytes + :rtype: bytes + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[bytes] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(query_request, (IOBase, bytes)): + _content = query_request + else: + _content = json.dumps(query_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_query_request( + container_name=container_name, + blob=blob, + version=version, + snapshot=snapshot, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "str", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(bytes, response.json(), format="base64") + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_tags( + self, + container_name: str, + blob: str, + *, + version_id: str, + version: str, + 
timeout: Optional[int] = None, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> _models.BlobTags: + """The Get Blob Tags operation enables users to get tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :return: BlobTags. The BlobTags is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.BlobTags + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + + _request = build_blob_get_tags_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + timeout=timeout, + snapshot=snapshot, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = 
self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BlobTags, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: _models.BlobTags, + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Required. + :type tags: ~azure.storage.blob.models.BlobTags + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: JSON, + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Required. 
+ :type tags: JSON + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: IO[bytes], + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Required. + :type tags: IO[bytes] + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. 
+ :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_tags( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + tags: Union[_models.BlobTags, JSON, IO[bytes]], + *, + version_id: str, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param tags: The blob tags. Is one of the following types: BlobTags, JSON, IO[bytes] Required. + :type tags: ~azure.storage.blob.models.BlobTags or JSON or IO[bytes] + :keyword version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Required. + :paramtype version_id: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
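A sketch of the body shapes the dispatching set_tags above accepts; the tag field names and the `client` from the earlier sketch are assumptions. Mappings and models are serialized with SdkJSONEncoder, while IOBase/bytes bodies are sent as-is:

import io
import json

vid = "2024-01-01T00:00:00.0000000Z"   # placeholder version id
ver = "2021-12-02"                     # placeholder service version
tags_json = {"blobTagSet": [{"key": "project", "value": "alpha"}]}  # field names assumed

# JSON mapping: encoded by the client via SdkJSONEncoder.
client.blob.set_tags("my-container", "report.csv", tags_json, version_id=vid, version=ver)

# Pre-serialized stream: passed through untouched as the request body.
raw = io.BytesIO(json.dumps(tags_json).encode("utf-8"))
client.blob.set_tags("my-container", "report.csv", raw, version_id=vid, version=ver)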
+ :paramtype if_tags: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(tags, (IOBase, bytes)): + _content = tags + else: + _content = json.dumps(tags, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_set_tags_request( + container_name=container_name, + blob=blob, + version_id=version_id, + version=version, + timeout=timeout, + lease_id=lease_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + if_tags=if_tags, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.BlobClient`'s + :attr:`page_blob` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :paramtype blob_content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4", + "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. + :paramtype tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. 
Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_sequence_number: Optional. The sequence number is a user-controlled property + that you can use to track requests. The value of the sequence number must be between 0 and 2^63 + - 1. The default value is 0. Default value is None. + :paramtype blob_sequence_number: int + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. 
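Because blob_content_length must land on a 512-byte boundary, a creation sketch may help (the `page_blob` attribute is named in the class docstring above; the size and version values are placeholders):

PAGE = 512
size = 8 * 1024 * 1024                 # 8 MiB; any multiple of 512 up to the service limit
assert size % PAGE == 0, "page blob size must be 512-byte aligned"

client.page_blob.create(               # `client` continues the earlier hypothetical sketch
    container_name="my-container",
    blob="disk.vhd",
    content_length=0,                  # Create sends no request body
    blob_content_length=size,          # maximum size reserved for the page blob
    version="2021-12-02",              # placeholder service version
)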
+ :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_create_request( + container_name=container_name, + blob=blob, + content_length=content_length, + blob_content_length=blob_content_length, + version=version, + timeout=timeout, + tier=tier, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_sequence_number=blob_sequence_number, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", 
response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def upload_pages( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. 
If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. Default value is None. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. Default value is None. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. Default value is None. + :paramtype if_sequence_number_equal_to: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
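Note that this generated upload_pages takes body as bytes and, per the implementation that follows, encodes it as a base64 JSON payload rather than streaming raw octets. A write sketch for the first page; page ranges are inclusive and must start and end on 512-byte boundaries:

data = b"\xff" * 512                   # exactly one 512-byte page
client.page_blob.upload_pages(         # `client` continues the earlier hypothetical sketch
    container_name="my-container",
    blob="disk.vhd",
    body=data,
    content_length=len(data),
    range="bytes=0-511",               # inclusive end: start + len(data) - 1
    version="2021-12-02",              # placeholder service version
)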
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_page_blob_upload_pages_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + 
"str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def clear_pages( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Clear Pages operation clears a range of pages from a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. Default value is None. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. Default value is None. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. Default value is None. + :paramtype if_sequence_number_equal_to: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. 
+ :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_clear_pages_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + timeout=timeout, + range=range, + lease_id=lease_id, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def upload_pages_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + source_url: str, + source_range: str, + range: str, + version: str, + timeout: Optional[int] = None, + source_content_md5: Optional[str] = None, + 
source_content_crc64: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Upload Pages From URL operation writes a range of pages to a page blob where the contents + are read from a URL. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword source_url: Specify a URL to the copy source. Required. + :paramtype source_url: str + :keyword source_range: Bytes of source data in the specified range. The length of this range + should match the ContentLength header and x-ms-range/Range destination range header. Required. + :paramtype source_range: str + :keyword range: The range of bytes in the destination blob to which the source data is written. + The length of this range must match the length given by source_range. Required. + :paramtype range: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_md5: str + :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_crc64: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. Default value is None. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. Default value is None. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. Default value is None. + :paramtype if_sequence_number_equal_to: int + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword source_if_modified_since: Specify this header value to operate only on a blob if it + has been modified since the specified date/time. Default value is None. + :paramtype source_if_modified_since: ~datetime.datetime + :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. Default value is None. + :paramtype source_if_unmodified_since: ~datetime.datetime + :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. + Default value is None. + :paramtype source_if_match: str + :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a + matching value. Default value is None. + :paramtype source_if_none_match: str + :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a + valid OAuth access token to copy source. Default value is None. + :paramtype copy_source_authorization: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None.
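A server-to-server copy sketch; the SAS-bearing source URL is a placeholder, and source_range and range must describe ranges of equal length:

client.page_blob.upload_pages_from_url(
    container_name="my-container",
    blob="disk.vhd",
    content_length=0,                          # no request body; data is read from source_url
    source_url="https://srcaccount.blob.core.windows.net/src/disk.vhd?sv=...",  # placeholder SAS URL
    source_range="bytes=0-511",
    range="bytes=0-511",                       # destination range of equal length
    version="2021-12-02",                      # placeholder service version
)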
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_upload_pages_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + source_url=source_url, + source_range=source_range, + range=range, + version=version, + timeout=timeout, + source_content_md5=source_content_md5, + source_content_crc64=source_content_crc64, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + lease_id=lease_id, + if_sequence_number_less_than_or_equal_to=if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=if_sequence_number_less_than, + if_sequence_number_equal_to=if_sequence_number_equal_to, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + copy_source_authorization=copy_source_authorization, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + 
"str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_page_ranges( + self, + container_name: str, + blob: str, + *, + version: str, + snapshot: Optional[str] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.PageList: + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. 
+ :paramtype maxresults: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: PageList. The PageList is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.PageList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + + _request = build_page_blob_get_page_ranges_request( + container_name=container_name, + blob=blob, + version=version, + snapshot=snapshot, + range=range, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + marker=marker, + maxresults=maxresults, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PageList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_page_ranges_diff( + self, + 
container_name: str, + blob: str, + *, + prevsnapshot: str, + prev_snapshot_url: str, + version: str, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.PageList: + """The Get Page Ranges Diff operation returns the list of page ranges that differ between a + previous snapshot and the target page blob or snapshot of a page blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword prevsnapshot: Supported in version 2015-07-08 and newer. The prevsnapshot parameter is + a DateTime value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is + the older of the two. Note that incremental snapshots are currently supported only for blobs + created on or after January 1, 2016. Required. + :paramtype prevsnapshot: str + :keyword prev_snapshot_url: This header is only supported in service versions 2019-04-19 and + after and specifies the URL of a previous snapshot of the target blob. The response will only + contain pages that were changed between the target blob and its previous snapshot. Required. + :paramtype prev_snapshot_url: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :paramtype snapshot: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword range: Return only the bytes of the blob in the specified range. Default value is + None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation.
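A diff sketch against an earlier snapshot. This generated signature requires both prevsnapshot and prev_snapshot_url as keywords, even though the service treats them as alternative ways to name the baseline, so both placeholders appear here:

changed = client.page_blob.get_page_ranges_diff(
    "my-container", "disk.vhd",
    prevsnapshot="2024-01-01T00:00:00.0000000Z",   # baseline snapshot timestamp (placeholder)
    prev_snapshot_url="https://myaccount.blob.core.windows.net/my-container/disk.vhd?snapshot=...",  # placeholder
    version="2021-12-02",                          # placeholder service version
)
# `changed` lists the page ranges updated or cleared since the baseline snapshot.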
The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Default value is None. + :paramtype maxresults: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: PageList. The PageList is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.PageList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + + _request = build_page_blob_get_page_ranges_diff_request( + container_name=container_name, + blob=blob, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + version=version, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + marker=marker, + maxresults=maxresults, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PageList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def resize( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + blob_content_length: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Resize operation increases the size of the page blob to the specified size. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :paramtype blob_content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. 
+ :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_resize_request( + container_name=container_name, + blob=blob, + blob_content_length=blob_content_length, + version=version, + timeout=timeout, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = 
self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def update_sequence_number( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + sequence_number_action: Union[str, _models.SequenceNumberActionType], + blob_sequence_number: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Update Sequence Number operation sets the blob's sequence number. The operation will fail + if the specified sequence number is less than the current sequence number of the blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword sequence_number_action: Required if the x-ms-blob-sequence-number header is set for + the request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. Known values are: "increment", "max", and "update". + Required. + :paramtype sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :keyword blob_sequence_number: Set for page blobs only. The sequence number is a + user-controlled value that you can use to track requests. The value of the sequence number must + be between 0 and 2^63 - 1. Required. + :paramtype blob_sequence_number: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_update_sequence_number_request( + container_name=container_name, + blob=blob, + sequence_number_action=sequence_number_action, + blob_sequence_number=blob_sequence_number, + version=version, + timeout=timeout, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def copy_incremental( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + copy_source: str, + version: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Copy Incremental operation copies a snapshot of the source page blob to a destination page + blob. 
The snapshot is copied such that only the differential changes since the
+        previously copied snapshot are transferred to the destination. The copied snapshots are
+        complete copies of the original snapshot and can be read or copied from as usual. This API is
+        supported in REST version 2016-05-31 and later.
+
+        :param container_name: The name of the container. Required.
+        :type container_name: str
+        :param blob: The name of the blob. Required.
+        :type blob: str
+        :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL
+        of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as
+        it would appear in a request URI. The source blob must either be public or must be
+        authenticated via a shared access signature. Required.
+        :paramtype copy_source: str
+        :keyword version: Specifies the version of the operation to use for this request. Required.
+        :paramtype version: str
+        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
+        Setting Timeouts for Blob Service Operations. Default value is None.
+        :paramtype timeout: int
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+        resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+        resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+        and matches this ID. Default value is None.
+        :paramtype lease_id: str
+        :keyword etag: Check if the resource has changed. Set to None to skip etag checking. Default
+        value is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_page_blob_copy_incremental_request( + container_name=container_name, + blob=blob, + copy_source=copy_source, + version=version, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + lease_id=lease_id, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.BlobClient`'s + :attr:`append_blob` attribute. 
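+
+    .. admonition:: Example
+
+        A minimal usage sketch, added for illustration only (not generator output). The
+        endpoint, credential, and API ``version`` values below are placeholder assumptions,
+        as is the ``BlobClient`` constructor signature; the ``append_blob`` attribute and the
+        keyword parameters are taken from the generated surface above.
+
+        .. code-block:: python
+
+            from azure.identity import DefaultAzureCredential
+            from azure.storage.blob import BlobClient
+
+            # Hypothetical setup: substitute a real endpoint and credential.
+            client = BlobClient(
+                "https://myaccount.blob.core.windows.net", credential=DefaultAzureCredential()
+            )
+
+            # Create an empty append blob, then commit a block at its tail.
+            client.append_blob.create(
+                "mycontainer", "log.txt", content_length=0, version="2021-12-02"
+            )
+            client.append_blob.append_block(
+                "mycontainer",
+                "log.txt",
+                b"new entry\n",
+                content_length=10,
+                max_size=4 * 1024 * 1024,
+                append_position=0,
+                version="2021-12-02",
+            )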
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + version: str, + timeout: Optional[int] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new append blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. 
Sets the blob's content disposition. If specified,
+        this property is stored with the blob and returned with a read request. Default value is None.
+        :paramtype blob_content_disposition: str
+        :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key
+        to use to encrypt the data provided in the request. If not specified, the request will be
+        encrypted with the root account key. Default value is None.
+        :paramtype encryption_key: str
+        :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256
+        hash of the encryption key used to encrypt the data provided in the request. This header is
+        only used for encryption with a customer-provided key. If the request is authenticated with a
+        client token, this header should be specified using the SHA256 hash of the encryption key.
+        Default value is None.
+        :paramtype encryption_key_sha256: str
+        :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the
+        algorithm to use for encryption. If not specified, the default is AES256. Default value is
+        None.
+        :paramtype encryption_algorithm: str
+        :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+        scope to use to encrypt the data provided in the request. If not specified, the request will be
+        encrypted with the root account key. Default value is None.
+        :paramtype encryption_scope: str
+        :keyword if_modified_since: A date-time value. A request is made under the condition that the
+        resource has been modified since the specified date-time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+        resource has not been modified since the specified date-time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+        matching value. Default value is None.
+        :paramtype if_tags: str
+        :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+        value is None.
+        :paramtype blob_tags_string: str
+        :keyword immutability_policy_expiry: Specifies the date time when the blob's immutability
+        policy is set to expire. Default value is None.
+        :paramtype immutability_policy_expiry: str
+        :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+        Known values are: "Mutable", "Locked", and "Unlocked". Default value is None.
+        :paramtype immutability_policy_mode: str or
+        ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :keyword legal_hold: Specifies whether a legal hold should be set on the blob. Default value is
+        None.
+        :paramtype legal_hold: bool
+        :keyword etag: Check if the resource has changed. Set to None to skip etag checking. Default
+        value is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_create_request( + container_name=container_name, + blob=blob, + content_length=content_length, + version=version, + timeout=timeout, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] 
= self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def append_block( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + content_length: int, + max_size: int, + append_position: int, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an append blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. 
+ :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_append_blob_append_block_request( + container_name=container_name, + blob=blob, + content_length=content_length, + max_size=max_size, + append_position=append_position, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "int", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", 
response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def append_block_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + source_url: str, + source_range: str, + max_size: int, + append_position: int, + version: str, + timeout: Optional[int] = None, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Append Block From URL operation creates a new block to be committed as part of an append + blob where the contents are read from a URL. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword source_url: Specify a URL to the copy source. Required. + :paramtype source_url: str + :keyword source_range: Bytes of source data in the specified range. Required. + :paramtype source_range: str + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be + read from the copy source. Default value is None. 
+ :paramtype source_content_md5: str + :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a + valid OAuth access token to copy source. Default value is None. + :paramtype copy_source_authorization: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_append_block_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + source_url=source_url, + source_range=source_range, + max_size=max_size, + append_position=append_position, + version=version, + timeout=timeout, + source_content_md5=source_content_md5, + source_content_crc64=source_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + copy_source_authorization=copy_source_authorization, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "int", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def seal( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + append_position: int, + version: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 version or later. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). Required. + :paramtype append_position: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_append_blob_seal_request( + container_name=container_name, + blob=blob, + append_position=append_position, + version=version, + timeout=timeout, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.BlobClient`'s + :attr:`block_blob` attribute. 
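+
+    .. admonition:: Example
+
+        A minimal usage sketch, added for illustration only (not generator output). The
+        endpoint, credential, and API ``version`` values below are placeholder assumptions,
+        as is the ``BlobClient`` constructor signature; the ``block_blob`` attribute and the
+        ``upload`` parameters are taken from the generated surface above.
+
+        .. code-block:: python
+
+            from azure.identity import DefaultAzureCredential
+            from azure.storage.blob import BlobClient
+
+            # Hypothetical setup: substitute a real endpoint and credential.
+            client = BlobClient(
+                "https://myaccount.blob.core.windows.net", credential=DefaultAzureCredential()
+            )
+
+            # Upload (or fully overwrite) the contents of a block blob in one call.
+            client.block_blob.upload(
+                "mycontainer",
+                "data.bin",
+                b"hello, world",
+                version="2021-12-02",
+                blob_content_type="application/octet-stream",
+            )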
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def upload( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. 
Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. 
+ Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_block_blob_upload_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise 
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def put_blob_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + content_length: int, + copy_source: str, + version: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + blob_cache_control: Optional[str] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + source_content_md5: Optional[str] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. 
+ :type blob: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword copy_source: Specifies the name of the source page blob snapshot. This value is a URL + of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as + it would appear in a request URI. The source blob must either be public or must be + authenticated via a shared access signature. Required. + :paramtype copy_source: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. 
+ :paramtype encryption_algorithm: str
+ :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption
+ scope to use to encrypt the data provided in the request. If not specified, the request will be
+ encrypted with the root account key. Default value is None.
+ :paramtype encryption_scope: str
+ :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10",
+ "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default
+ value is None.
+ :paramtype tier: str or ~azure.storage.blob.models.AccessTier
+ :keyword if_modified_since: A date-time value. A request is made under the condition that the
+ resource has been modified since the specified date-time. Default value is None.
+ :paramtype if_modified_since: ~datetime.datetime
+ :keyword if_unmodified_since: A date-time value. A request is made under the condition that the
+ resource has not been modified since the specified date-time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime
+ :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a
+ matching value. Default value is None.
+ :paramtype if_tags: str
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with
+ a matching value. Default value is None.
+ :paramtype source_if_tags: str
+ :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :paramtype source_content_md5: str
+ :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+ value is None.
+ :paramtype blob_tags_string: str
+ :keyword copy_source_blob_properties: Optional, default is true. Indicates if properties from
+ the source blob should be copied. Default value is None.
+ :paramtype copy_source_blob_properties: bool
+ :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+ valid OAuth access token to copy source. Default value is None.
+ :paramtype copy_source_authorization: str
+ :keyword copy_source_tags: Optional, default 'replace'. Indicates if source tags should be
+ copied or replaced with the tags specified by x-ms-tags. Default value is None.
+ :paramtype copy_source_tags: str
+ :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
+ None.
+ :paramtype etag: str
+ :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_block_blob_put_blob_from_url_request( + container_name=container_name, + blob=blob, + content_length=content_length, + copy_source=copy_source, + version=version, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + blob_cache_control=blob_cache_control, + lease_id=lease_id, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + source_if_modified_since=source_if_modified_since, + source_if_match=source_if_match, + source_if_none_match=source_if_none_match, + source_if_tags=source_if_tags, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def stage_block( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + body: bytes, + *, + block_id: str, + content_length: int, + version: str, + transactional_content_md5: Optional[str] = None, + timeout: Optional[int] = None, + transactional_content_crc64: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param body: The body of the request. Required. + :type body: bytes + :keyword block_id: A valid Base64 string value that identifies the block. Prior to encoding, + the string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. Required. + :paramtype block_id: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. 
If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True, format="base64") # type: ignore + + _request = build_block_blob_stage_block_request( + container_name=container_name, + blob=blob, + block_id=block_id, + content_length=content_length, + version=version, + transactional_content_md5=transactional_content_md5, + timeout=timeout, + transactional_content_crc64=transactional_content_crc64, + lease_id=lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + 
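The staging call above is one half of the two-phase block upload: each stage_block call parks a chunk on the service under a Base64 block ID, and nothing becomes readable until a later commit_block_list (defined further down in this class) names those IDs. The following is a minimal sketch of that flow, not part of the generated client: it assumes `ops` is a BlockBlobOperations instance obtained from an authenticated client's `block_blob` attribute (per the class docstring above), and the container name, blob name, and `version` string are placeholders invented for illustration.

import base64

# Assumed: `ops` is a BlockBlobOperations instance reached through an
# authenticated BlobClient's `block_blob` attribute. The container/blob
# names and the x-ms-version value below are illustrative placeholders.
chunks = [b"hello, ", b"world"]
block_ids = []
for index, chunk in enumerate(chunks):
    # Block IDs must be Base64 strings whose pre-encoding length is the
    # same for every block in the blob, so zero-pad the counter.
    block_id = base64.b64encode(f"{index:06d}".encode()).decode()
    ops.stage_block(
        "my-container",            # placeholder container name
        "my-blob.txt",             # placeholder blob name
        chunk,
        block_id=block_id,
        content_length=len(chunk),
        version="2021-12-02",      # placeholder service version
    )
    block_ids.append(block_id)
# The blob only materializes once the staged IDs are committed, e.g. by
# calling ops.commit_block_list with a BlockLookupList listing block_ids
# as its latest blocks.

Because each block is independent, failed chunks can be retried individually before the commit, which is the main reason the two-phase protocol exists.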
@distributed_trace + def stage_block_from_url( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + *, + block_id: str, + content_length: int, + source_url: str, + source_range: str, + version: str, + source_content_md5: Optional[str] = None, + source_content_crc64: Optional[str] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Stage Block From URL operation creates a new block to be committed as part of a blob where + the contents are read from a URL. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword block_id: A valid Base64 string value that identifies the block. Prior to encoding, + the string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. Required. + :paramtype block_id: str + :keyword content_length: The length of the request. Required. + :paramtype content_length: int + :keyword source_url: Specify a URL to the copy source. Required. + :paramtype source_url: str + :keyword source_range: Bytes of source data in the specified range. Required. + :paramtype source_range: str + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword source_content_md5: Specify the md5 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_md5: str + :keyword source_content_crc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :paramtype source_content_crc64: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. 
Specifies the encryption
+ scope to use to encrypt the data provided in the request. If not specified, the request will be
+ encrypted with the root account key. Default value is None.
+ :paramtype encryption_scope: str
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID. Default value is None.
+ :paramtype lease_id: str
+ :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
+ has been modified since the specified date/time. Default value is None.
+ :paramtype source_if_modified_since: ~datetime.datetime
+ :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
+ has not been modified since the specified date/time. Default value is None.
+ :paramtype source_if_unmodified_since: ~datetime.datetime
+ :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
+ Default value is None.
+ :paramtype source_if_match: str
+ :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
+ matching value. Default value is None.
+ :paramtype source_if_none_match: str
+ :keyword copy_source_authorization: Only Bearer type is supported. Credentials should be a
+ valid OAuth access token to copy source. Default value is None.
+ :paramtype copy_source_authorization: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_block_blob_stage_block_from_url_request(
+ container_name=container_name,
+ blob=blob,
+ block_id=block_id,
+ content_length=content_length,
+ source_url=source_url,
+ source_range=source_range,
+ version=version,
+ source_content_md5=source_content_md5,
+ source_content_crc64=source_content_crc64,
+ timeout=timeout,
+ encryption_key=encryption_key,
+ encryption_key_sha256=encryption_key_sha256,
+ encryption_algorithm=encryption_algorithm,
+ encryption_scope=encryption_scope,
+ lease_id=lease_id,
+ source_if_modified_since=source_if_modified_since,
+ source_if_unmodified_since=source_if_unmodified_since,
+ source_if_match=source_if_match,
+ source_if_none_match=source_if_none_match,
+ copy_source_authorization=copy_source_authorization,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = _deserialize(_models.StorageError, response.json())
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @overload + def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: _models.BlockLookupList, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. 
Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: JSON, + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. 
You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: JSON + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. 
+ :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: IO[bytes], + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Required. + :type blocks: IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. 
+ :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. 
+ :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + container_name: str, + blob: str, + blocks: Union[_models.BlockLookupList, JSON, IO[bytes]], + *, + version: str, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[str] = None, + transactional_content_md5: Optional[str] = None, + transactional_content_crc64: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[str] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTier]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_tags: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[str] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :param blocks: Blob Blocks. Is one of the following types: BlockLookupList, JSON, IO[bytes] + Required. + :type blocks: ~azure.storage.blob.models.BlockLookupList or JSON or IO[bytes] + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :paramtype timeout: int + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_type: str + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_language: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + Default value is None. + :paramtype blob_content_md5: str + :keyword transactional_content_md5: Optional. An MD5 hash of the blob content. Note that this + hash is not validated, as the hashes for the individual blocks were validated when each was + uploaded. Default value is None. + :paramtype transactional_content_md5: str + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: str + :keyword blob_content_disposition: Optional. Sets the blob's content disposition. If specified, + this property is stored with the blob and returned with a read request. Default value is None. + :paramtype blob_content_disposition: str + :keyword encryption_key: Optional. Version 2019-07-07 and later. Specifies the encryption key + to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: Optional. Version 2019-07-07 and later. Specifies the SHA256 + hash of the encryption key used to encrypt the data provided in the request. This header is + only used for encryption with a customer-provided key. If the request is authenticated with a + client token, this header should be specified using the SHA256 hash of the encryption key. + Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: Optional. Version 2019-07-07 and later. Specifies the + algorithm to use for encryption. If not specified, the default is AES256. Default value is + None. + :paramtype encryption_algorithm: str + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the encryption + scope to use to encrypt the data provided in the request. If not specified, the request will be + encrypted with the root account key. Default value is None. + :paramtype encryption_scope: str + :keyword tier: Optional. The tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", and "Archive". Default + value is None. + :paramtype tier: str or ~azure.storage.blob.models.AccessTier + :keyword if_modified_since: A date-time value. 
A request is made under the condition that the + resource has been modified since the specified date-time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A date-time value. A request is made under the condition that the + resource has not been modified since the specified date-time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :keyword blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :paramtype blob_tags_string: str + :keyword immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :paramtype immutability_policy_expiry: str + :keyword immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Locked", and "Unlocked". Default value is None. + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: Specified if a legal hold should be set on the blob. Default value is + None. + :paramtype legal_hold: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blocks, (IOBase, bytes)): + _content = blocks + else: + _content = json.dumps(blocks, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_block_blob_commit_block_list_request( + container_name=container_name, + blob=blob, + version=version, + timeout=timeout, + blob_cache_control=blob_cache_control, + blob_content_type=blob_content_type, + blob_content_encoding=blob_content_encoding, + blob_content_language=blob_content_language, + blob_content_md5=blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + blob_content_disposition=blob_content_disposition, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + encryption_scope=encryption_scope, + tier=tier, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags=if_tags, + 
blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + etag=etag, + match_condition=match_condition, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize("str", response.headers.get("x-ms-content-crc64")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_block_list( + self, + container_name: str, + blob: str, + *, + list_type: Union[str, _models.BlockListType], + version: str, + snapshot: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any + ) -> _models.BlockLookupList: + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. + + :param container_name: The name of the container. Required. + :type container_name: str + :param blob: The name of the blob. Required. + :type blob: str + :keyword list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Required. + :paramtype list_type: str or ~azure.storage.blob.models.BlockListType + :keyword version: Specifies the version of the operation to use for this request. Required. + :paramtype version: str + :keyword snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. 
+ :paramtype snapshot: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. Default value is None. + :paramtype if_tags: str + :return: BlockLookupList. The BlockLookupList is compatible with MutableMapping + :rtype: ~azure.storage.blob.models.BlockLookupList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlockLookupList] = kwargs.pop("cls", None) + + _request = build_block_blob_get_block_list_request( + container_name=container_name, + blob=blob, + list_type=list_type, + version=version, + snapshot=snapshot, + lease_id=lease_id, + if_tags=if_tags, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _deserialize(_models.StorageError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BlockLookupList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
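A minimal usage sketch for the generated block-blob operations shown above; nothing in it comes from the change itself. `client` stands for an already-constructed instance of the generated operations class, the container name, blob name, block IDs, and `version` value are placeholders, and the `latest=` field name follows the long-standing BlockLookupList shape, so treat all of them as assumptions:

    from azure.storage.blob import models as _models

    # Block IDs must be the same base64-encoded strings used when the blocks
    # were staged by earlier Put Block calls (assumed to have happened already).
    block_list = _models.BlockLookupList(latest=["YmxvY2stMDE=", "YmxvY2stMDI="])

    # client: hypothetical, already-configured generated operations client.
    client.commit_block_list(
        container_name="mycontainer",
        blob="myblob.txt",
        blocks=block_list,
        version="2021-12-02",  # assumed x-ms-version value; the keyword itself is required
    )

    # Read back the committed list; list_type accepts "committed",
    # "uncommitted", or "all" per the docstring above.
    committed = client.get_block_list(
        container_name="mycontainer",
        blob="myblob.txt",
        list_type="committed",
        version="2021-12-02",
    )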
+# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/py.typed b/sdk/storage/azure-storage-blob/azure/storage/blob/py.typed index e69de29bb2d1..e5aff4f83af8 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/py.typed +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py b/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py index 5b8b9b02a2a4..a0f79036c22b 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py @@ -25,37 +25,40 @@ import os import sys + class AuthSamples(object): - url = "https://{}.blob.core.windows.net".format( - os.getenv("AZURE_STORAGE_ACCOUNT_NAME") - ) - oauth_url = "https://{}.blob.core.windows.net".format( - os.getenv("OAUTH_STORAGE_ACCOUNT_NAME") - ) + url = "https://{}.blob.core.windows.net".format(os.getenv("AZURE_STORAGE_ACCOUNT_NAME")) + oauth_url = "https://{}.blob.core.windows.net".format(os.getenv("OAUTH_STORAGE_ACCOUNT_NAME")) connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") shared_access_key = os.getenv("AZURE_STORAGE_ACCESS_KEY") def auth_connection_string(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: auth_connection_string") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: auth_connection_string" + ) sys.exit(1) # [START auth_from_connection_string] from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [END auth_from_connection_string] # [START auth_from_connection_string_container] from azure.storage.blob import ContainerClient - container_client = ContainerClient.from_connection_string( - self.connection_string, container_name="mycontainer") + + container_client = ContainerClient.from_connection_string(self.connection_string, container_name="mycontainer") # [END auth_from_connection_string_container] # [START auth_from_connection_string_blob] from azure.storage.blob import BlobClient + blob_client = BlobClient.from_connection_string( - self.connection_string, container_name="mycontainer", blob_name="blobname.txt") + self.connection_string, container_name="mycontainer", blob_name="blobname.txt" + ) # [END auth_from_connection_string_blob] # Get account information for the Blob Service @@ -63,11 +66,11 @@ def auth_connection_string(self): def auth_shared_key(self): if self.shared_access_key is None: - print("Missing required environment variable: AZURE_STORAGE_ACCESS_KEY." + '\n' + - "Test: auth_shared_key") + print("Missing required environment variable: AZURE_STORAGE_ACCESS_KEY." 
+ "\n" + "Test: auth_shared_key") sys.exit(1) # [START create_blob_service_client] from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient(account_url=self.url, credential=self.shared_access_key) # [END create_blob_service_client] @@ -77,6 +80,7 @@ def auth_shared_key(self): def auth_blob_url(self): # [START create_blob_client] from azure.storage.blob import BlobClient + blob_client = BlobClient.from_blob_url(blob_url="https://account.blob.core.windows.net/container/blob-name") # [END create_blob_client] @@ -89,11 +93,15 @@ def auth_blob_url(self): def auth_shared_access_signature(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: auth_shared_access_signature") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: auth_shared_access_signature" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START create_sas_token] @@ -106,7 +114,7 @@ def auth_shared_access_signature(self): account_key=blob_service_client.credential.account_key, resource_types=ResourceTypes(object=True), permission=AccountSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # [END create_sas_token] @@ -118,20 +126,20 @@ def auth_default_azure_credential(self): # Alternately, one can specify the AZURE_TENANT_ID, AZURE_CLIENT_ID, and AZURE_CLIENT_SECRET to use the EnvironmentCredentialClass. # The docs above specify all mechanisms which the defaultCredential internally support. 
from azure.identity import DefaultAzureCredential + default_credential = DefaultAzureCredential() # Instantiate a BlobServiceClient using a token credential from azure.storage.blob import BlobServiceClient - blob_service_client = BlobServiceClient( - account_url=self.oauth_url, - credential=default_credential - ) + + blob_service_client = BlobServiceClient(account_url=self.oauth_url, credential=default_credential) # [END create_blob_service_client_oauth] # Get account information for the Blob Service account_info = blob_service_client.get_service_properties() -if __name__ == '__main__': + +if __name__ == "__main__": sample = AuthSamples() sample.auth_connection_string() sample.auth_shared_access_signature() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py index 13d2cbefc252..8c1f309a5183 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py @@ -27,52 +27,60 @@ import sys import asyncio + class AuthSamplesAsync(object): - url = "https://{}.blob.core.windows.net".format( - os.getenv("AZURE_STORAGE_ACCOUNT_NAME") - ) - oauth_url = "https://{}.blob.core.windows.net".format( - os.getenv("OAUTH_STORAGE_ACCOUNT_NAME") - ) + url = "https://{}.blob.core.windows.net".format(os.getenv("AZURE_STORAGE_ACCOUNT_NAME")) + oauth_url = "https://{}.blob.core.windows.net".format(os.getenv("OAUTH_STORAGE_ACCOUNT_NAME")) connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") shared_access_key = os.getenv("AZURE_STORAGE_ACCESS_KEY") async def auth_connection_string_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: auth_connection_string_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: auth_connection_string_async" + ) sys.exit(1) # [START auth_from_connection_string] from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [END auth_from_connection_string] # [START auth_from_connection_string_container] from azure.storage.blob.aio import ContainerClient - container_client = ContainerClient.from_connection_string( - self.connection_string, container_name="mycontainer") + + container_client = ContainerClient.from_connection_string(self.connection_string, container_name="mycontainer") # [END auth_from_connection_string_container] # [START auth_from_connection_string_blob] from azure.storage.blob.aio import BlobClient + blob_client = BlobClient.from_connection_string( - self.connection_string, container_name="mycontainer", blob_name="blobname.txt") + self.connection_string, container_name="mycontainer", blob_name="blobname.txt" + ) # [END auth_from_connection_string_blob] async def auth_shared_key_async(self): if self.shared_access_key is None: - print("Missing required environment variable: AZURE_STORAGE_ACCESS_KEY." + '\n' + - "Test: auth_shared_key_async") + print( + "Missing required environment variable: AZURE_STORAGE_ACCESS_KEY." 
+ + "\n" + + "Test: auth_shared_key_async" + ) sys.exit(1) # [START create_blob_service_client] from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient(account_url=self.url, credential=self.shared_access_key) # [END create_blob_service_client] async def auth_blob_url_async(self): # [START create_blob_client] from azure.storage.blob.aio import BlobClient + blob_client = BlobClient.from_blob_url(blob_url="https://account.blob.core.windows.net/container/blob-name") # [END create_blob_client] @@ -85,11 +93,15 @@ async def auth_blob_url_async(self): async def auth_shared_access_signature_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: auth_shared_access_signature_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: auth_shared_access_signature_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START create_sas_token] @@ -102,7 +114,7 @@ async def auth_shared_access_signature_async(self): account_key=blob_service_client.credential.account_key, resource_types=ResourceTypes(object=True), permission=AccountSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # [END create_sas_token] @@ -114,19 +126,19 @@ async def auth_default_azure_credential(self): # Alternately, one can specify the AZURE_TENANT_ID, AZURE_CLIENT_ID, and AZURE_CLIENT_SECRET to use the EnvironmentCredentialClass. # The docs above specify all mechanisms which the defaultCredential internally support. from azure.identity.aio import DefaultAzureCredential + default_credential = DefaultAzureCredential() # Instantiate a BlobServiceClient using a token credential from azure.storage.blob.aio import BlobServiceClient - blob_service_client = BlobServiceClient( - account_url=self.oauth_url, - credential=default_credential - ) + + blob_service_client = BlobServiceClient(account_url=self.oauth_url, credential=default_credential) # [END create_blob_service_client_oauth] # Get account information for the Blob Service account_info = await blob_service_client.get_service_properties() + async def main(): sample = AuthSamplesAsync() await sample.auth_connection_string_async() @@ -134,5 +146,6 @@ async def main(): await sample.auth_blob_url_async() await sample.auth_default_azure_credential() -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_batch_delete_blobs.py b/sdk/storage/azure-storage-blob/samples/blob_samples_batch_delete_blobs.py index 9636cc34023e..79b8e8cc005b 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_batch_delete_blobs.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_batch_delete_blobs.py @@ -18,11 +18,14 @@ def batch_delete_blobs_sample(local_path): # Set the connection string and container name values to initialize the Container Client - connection_string = os.getenv('AZURE_STORAGE_CONNECTION_STRING') + connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") if connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ '\n' + - "Test: batch_delete_blobs_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: batch_delete_blobs_sample" + ) sys.exit(1) blob_service_client = BlobServiceClient.from_connection_string(conn_str=connection_string) @@ -34,7 +37,7 @@ def batch_delete_blobs_sample(local_path): pass # Upload blobs for filename in os.listdir(local_path): - with open(local_path+filename, "rb") as data: + with open(local_path + filename, "rb") as data: container_client.upload_blob(name=filename, data=data, blob_type="BlockBlob") # List blobs in storage account @@ -43,6 +46,6 @@ def batch_delete_blobs_sample(local_path): # Delete blobs container_client.delete_blobs(*blob_list) -if __name__ == '__main__': - batch_delete_blobs_sample(SOURCE_FOLDER) +if __name__ == "__main__": + batch_delete_blobs_sample(SOURCE_FOLDER) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption.py b/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption.py index 83a0b386ea00..0c7f842a0e80 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption.py @@ -46,20 +46,20 @@ class KeyWrapper: def __init__(self, kid): self.kek = os.urandom(32) self.backend = default_backend() - self.kid = 'local:' + kid + self.kid = "local:" + kid - def wrap_key(self, key, algorithm='A256KW'): - if algorithm == 'A256KW': + def wrap_key(self, key, algorithm="A256KW"): + if algorithm == "A256KW": return aes_key_wrap(self.kek, key, self.backend) - raise ValueError('Unknown key wrap algorithm.') + raise ValueError("Unknown key wrap algorithm.") def unwrap_key(self, key, algorithm): - if algorithm == 'A256KW': + if algorithm == "A256KW": return aes_key_unwrap(self.kek, key, self.backend) - raise ValueError('Unknown key wrap algorithm.') + raise ValueError("Unknown key wrap algorithm.") def get_key_wrap_algorithm(self): - return 'A256KW' + return "A256KW" def get_kid(self): return self.kid @@ -78,40 +78,32 @@ def resolve_key(self, kid): class RSAKeyWrapper: def __init__(self, kid): - self.private_key = generate_private_key(public_exponent=65537, - key_size=2048, - backend=default_backend()) + self.private_key = generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend()) self.public_key = self.private_key.public_key() - self.kid = 'local:' + kid - - def wrap_key(self, key, algorithm='RSA'): - if algorithm == 'RSA': - return self.public_key.encrypt(key, - OAEP( - mgf=MGF1(algorithm=SHA1()), # nosec - algorithm=SHA1(), # nosec - label=None) - ) - raise ValueError('Unknown key wrap algorithm.') + self.kid = "local:" + kid + + def wrap_key(self, key, algorithm="RSA"): + if algorithm == "RSA": + return self.public_key.encrypt( + key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + ) + raise ValueError("Unknown key wrap algorithm.") def unwrap_key(self, key, algorithm): - if algorithm == 'RSA': - return self.private_key.decrypt(key, - OAEP( - mgf=MGF1(algorithm=SHA1()), # nosec - algorithm=SHA1(), # nosec - label=None) - ) - raise ValueError('Unknown key wrap algorithm.') + if algorithm == "RSA": + return self.private_key.decrypt( + key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + ) + raise ValueError("Unknown key wrap algorithm.") def get_key_wrap_algorithm(self): - return 'RSA' + return "RSA" def get_kid(self): return self.kid -class 
BlobEncryptionSamples(): +class BlobEncryptionSamples: def __init__(self, bsc: BlobServiceClient): self.bsc = bsc @@ -123,12 +115,12 @@ def run_all_samples(self): self.alternate_key_algorithms() def _get_resource_reference(self, prefix: str) -> str: - return '{}{}'.format(prefix, str(uuid.uuid4()).replace('-', '')) + return "{}{}".format(prefix, str(uuid.uuid4()).replace("-", "")) - def _get_blob_reference(self, prefix: str = 'blob') -> str: + def _get_blob_reference(self, prefix: str = "blob") -> str: return self._get_resource_reference(prefix) - def _create_container(self, prefix: str = 'container') -> str: + def _create_container(self, prefix: str = "container") -> str: container_name = self._get_resource_reference(prefix) self.container_client = self.bsc.get_container_client(container_name) self.container_client.create_container() @@ -137,32 +129,32 @@ def _create_container(self, prefix: str = 'container') -> str: def put_encrypted_blob(self): self._create_container() try: - block_blob_name = self._get_blob_reference(prefix='block_blob_') + block_blob_name = self._get_blob_reference(prefix="block_blob_") # KeyWrapper implements the key encryption key interface. Setting # this property will tell the service to encrypt the blob. Blob encryption # is supported only for uploading whole blobs and only at the time of creation. - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.container_client.key_encryption_key = kek - self.container_client.encryption_version = '2.0' + self.container_client.encryption_version = "2.0" - self.container_client.upload_blob(block_blob_name, b'ABC') + self.container_client.upload_blob(block_blob_name, b"ABC") - # Even when encrypting, uploading large blobs will still automatically + # Even when encrypting, uploading large blobs will still automatically # chunk the data. max_single_put_size = self.bsc._config.max_single_put_size - self.container_client.upload_blob(block_blob_name, b'ABC' * max_single_put_size, overwrite=True) + self.container_client.upload_blob(block_blob_name, b"ABC" * max_single_put_size, overwrite=True) finally: self.container_client.delete_container() def get_encrypted_blob(self): self._create_container() try: - block_blob_name = self._get_blob_reference(prefix='block_blob') + block_blob_name = self._get_blob_reference(prefix="block_blob") - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.container_client.key_encryption_key = kek - self.container_client.encryption_version = '2.0' + self.container_client.encryption_version = "2.0" data = os.urandom(13 * self.bsc._config.max_single_put_size + 1) self.container_client.upload_blob(block_blob_name, data) @@ -178,25 +170,24 @@ def get_encrypted_blob(self): # and decrypting range gets. 
block_blob_client = self.container_client.get_blob_client(block_blob_name) blob_full = block_blob_client.download_blob().readall() - blob_range = block_blob_client.download_blob(offset=len(data) // 2, - length=len(data) // 4).readall() + blob_range = block_blob_client.download_blob(offset=len(data) // 2, length=len(data) // 4).readall() finally: self.container_client.delete_container() def get_encrypted_blob_key_encryption_key(self): self._create_container() try: - block_blob_name = self._get_blob_reference(prefix='block_blob') + block_blob_name = self._get_blob_reference(prefix="block_blob") - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.container_client.key_encryption_key = kek - self.container_client.encryption_version = '2.0' + self.container_client.encryption_version = "2.0" - data = b'ABC' + data = b"ABC" self.container_client.upload_blob(block_blob_name, data) # If the key_encryption_key property is set on download, the blobservice - # will try to decrypt blobs using that key. If both the key_resolver and + # will try to decrypt blobs using that key. If both the key_resolver and # key_encryption_key are set, the result of the key_resolver will take precedence # and the decryption will fail if that key is not successful. self.container_client.key_resolver_function = None @@ -207,18 +198,18 @@ def get_encrypted_blob_key_encryption_key(self): def require_encryption(self): self._create_container() try: - encrypted_blob_name = self._get_blob_reference(prefix='block_blob_') - unencrypted_blob_name = self._get_blob_reference(prefix='unencrypted_blob_') + encrypted_blob_name = self._get_blob_reference(prefix="block_blob_") + unencrypted_blob_name = self._get_blob_reference(prefix="unencrypted_blob_") self.container_client.key_encryption_key = None self.container_client.key_resolver_function = None self.container_client.require_encryption = False - self.container_client.encryption_version = '2.0' + self.container_client.encryption_version = "2.0" - data = b'ABC' + data = b"ABC" self.container_client.upload_blob(unencrypted_blob_name, data) - # If the require_encryption flag is set, the service object will throw if + # If the require_encryption flag is set, the service object will throw if # there is no encryption policy set on upload. self.container_client.require_encryption = True try: @@ -229,7 +220,7 @@ def require_encryption(self): # If the require_encryption flag is set, the service object will throw if # there is no encryption policy set on download. - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") key_resolver = KeyResolver() key_resolver.put_key(kek) @@ -257,25 +248,26 @@ def require_encryption(self): def alternate_key_algorithms(self): self._create_container() try: - block_blob_name = self._get_blob_reference(prefix='block_blob') + block_blob_name = self._get_blob_reference(prefix="block_blob") # The key wrapping algorithm used by the key_encryption_key # is entirely up to the choice of the user. For example, # RSA may be used. 
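As the comments above note, the key-wrapping implementation is entirely pluggable; the only contract is the four methods that KeyWrapper and RSAKeyWrapper both expose. A do-nothing stub, purely to make that interface explicit (illustrative only; an identity "wrap" provides no security):

    class IdentityKeyWrapper:
        """Illustrative stand-in showing the key-encryption-key interface
        expected by the client-side encryption hooks; never use in practice."""

        def __init__(self, kid):
            self.kid = "local:" + kid

        def wrap_key(self, key, algorithm="none"):
            return key  # a real implementation encrypts the content key here

        def unwrap_key(self, key, algorithm):
            return key  # ...and decrypts it here

        def get_key_wrap_algorithm(self):
            return "none"

        def get_kid(self):
            return self.kid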
-            kek = RSAKeyWrapper('key2')
+            kek = RSAKeyWrapper("key2")
             key_resolver = KeyResolver()
             key_resolver.put_key(kek)

             self.container_client.key_encryption_key = kek
             self.container_client.key_resolver_function = key_resolver.resolve_key
-            self.container_client.encryption_version = '2.0'
+            self.container_client.encryption_version = "2.0"

-            self.container_client.upload_blob(block_blob_name, b'ABC')
+            self.container_client.upload_blob(block_blob_name, b"ABC")
             blob = self.container_client.get_blob_client(block_blob_name).download_blob().readall()
         finally:
             self.container_client.delete_container()

+
 try:
-    CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']
+    CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"]
 except KeyError:
     print("AZURE_STORAGE_CONNECTION_STRING must be set.")
     sys.exit(1)
diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption_keyvault.py b/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption_keyvault.py
index f2b6a10bde9b..760cdd45d31d 100644
--- a/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption_keyvault.py
+++ b/sdk/storage/azure-storage-blob/samples/blob_samples_client_side_encryption_keyvault.py
@@ -41,25 +41,28 @@
 from azure.storage.blob import BlobServiceClient

 # Environment variable keys which must be set to run this sample
-STORAGE_URL = 'AZURE_STORAGE_ACCOUNT_URL'
-KEYVAULT_URL = 'AZURE_KEYVAULT_DNS_NAME'
-CLIENT_ID = 'ACTIVE_DIRECTORY_APPLICATION_ID'
-CLIENT_SECRET = 'ACTIVE_DIRECTORY_APPLICATION_SECRET'
-TENANT_ID = 'ACTIVE_DIRECTORY_TENANT_ID'
+STORAGE_URL = "AZURE_STORAGE_ACCOUNT_URL"
+KEYVAULT_URL = "AZURE_KEYVAULT_DNS_NAME"
+CLIENT_ID = "ACTIVE_DIRECTORY_APPLICATION_ID"
+CLIENT_SECRET = "ACTIVE_DIRECTORY_APPLICATION_SECRET"
+TENANT_ID = "ACTIVE_DIRECTORY_TENANT_ID"
+

 def get_env_var(key):
     try:
         return os.environ[key]
     except KeyError:
-        print('{} must be set.'.format(key))
+        print("{} must be set.".format(key))
         sys.exit(1)

+
 def make_resource_name(prefix):
-    return '{}{}'.format(prefix, str(uuid.uuid4()).replace('-', ''))
+    return "{}{}".format(prefix, str(uuid.uuid4()).replace("-", ""))
+

 class KeyWrapper:
-    """ Class that fulfills the interface used by the storage SDK's
-    automatic client-side encyrption and decryption routines. """
+    """Class that fulfills the interface used by the storage SDK's
+    automatic client-side encryption and decryption routines."""

     def __init__(self, kek, credential):
         self.algorithm = KeyWrapAlgorithm.aes_256
@@ -69,13 +72,13 @@ def __init__(self, kek, credential):

     def wrap_key(self, key):
         if self.algorithm != KeyWrapAlgorithm.aes_256:
-            raise ValueError('Unknown key wrap algorithm. {}'.format(self.algorithm))
+            raise ValueError("Unknown key wrap algorithm. {}".format(self.algorithm))
         wrapped = self.client.wrap_key(key=key, algorithm=self.algorithm)
         return wrapped.encrypted_key

     def unwrap_key(self, key, _):
         if self.algorithm != KeyWrapAlgorithm.aes_256:
-            raise ValueError('Unknown key wrap algorithm. {}'.format(self.algorithm))
+            raise ValueError("Unknown key wrap algorithm. 
{}".format(self.algorithm)) unwrapped = self.client.unwrap_key(encrypted_key=key, algorithm=self.algorithm) return unwrapped.key @@ -85,6 +88,7 @@ def get_key_wrap_algorithm(self): def get_kid(self): return self.kid + # Retrieve sensitive data from environment variables storage_url = get_env_var(STORAGE_URL) keyvault_url = get_env_var(KEYVAULT_URL) @@ -94,21 +98,21 @@ def get_kid(self): secret_client = SecretClient(keyvault_url, credential=credential) # The secret is url-safe base64 encoded bytes, content type 'application/octet-stream' -secret = secret_client.get_secret('symmetric-key') +secret = secret_client.get_secret("symmetric-key") key_bytes = base64.urlsafe_b64decode(secret.value) -kvk = KeyVaultKey(key_id=secret.id, key_ops=['unwrapKey', 'wrapKey'], k=key_bytes, kty=KeyType.oct) +kvk = KeyVaultKey(key_id=secret.id, key_ops=["unwrapKey", "wrapKey"], k=key_bytes, kty=KeyType.oct) kek = KeyWrapper(kvk, credential) storage_client = BlobServiceClient(storage_url, credential=credential) -container_name = make_resource_name('container') -blob_name = make_resource_name('blob') +container_name = make_resource_name("container") +blob_name = make_resource_name("blob") container_client = storage_client.get_container_client(container_name) container_client.key_encryption_key = kek -container_client.encryption_version = '2.0' +container_client.encryption_version = "2.0" container_client.create_container() try: - container_client.upload_blob(blob_name, 'This is my blob.') + container_client.upload_blob(blob_name, "This is my blob.") # Download without decrypting container_client.key_encryption_key = None diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_common.py b/sdk/storage/azure-storage-blob/samples/blob_samples_common.py index df674ffda1bf..85c431ecce52 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_common.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_common.py @@ -23,23 +23,27 @@ from azure.core.exceptions import HttpResponseError, ResourceExistsError from azure.storage.blob import BlobServiceClient -SOURCE_FILE = 'SampleSource.txt' +SOURCE_FILE = "SampleSource.txt" class CommonBlobSamples(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") - #--Begin Blob Samples----------------------------------------------------------------- + # --Begin Blob Samples----------------------------------------------------------------- def blob_snapshots(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_snapshots") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: blob_snapshots" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -63,7 +67,7 @@ def blob_snapshots(self): snapshot_blob = blob_client.create_snapshot() # Get the snapshot ID - print(snapshot_blob.get('snapshot')) + print(snapshot_blob.get("snapshot")) # [END create_blob_snapshot] # Delete only the snapshot (blob itself is retained) @@ -74,16 +78,21 @@ def blob_snapshots(self): def soft_delete_and_undelete_blob(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ '\n' + - "Test: soft_delete_and_undelete_blob") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: soft_delete_and_undelete_blob" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Create a retention policy to retain deleted blobs from azure.storage.blob import RetentionPolicy + delete_retention_policy = RetentionPolicy(enabled=True, days=1) # Set the retention policy on the service @@ -120,11 +129,15 @@ def soft_delete_and_undelete_blob(self): def delete_multiple_blobs(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: delete_multiple_blobs") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: delete_multiple_blobs" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -157,12 +170,16 @@ def delete_multiple_blobs(self): def acquire_lease_on_blob(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: acquire_lease_on_blob") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: acquire_lease_on_blob" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -194,11 +211,15 @@ def acquire_lease_on_blob(self): def start_copy_blob_from_url_and_abort_copy(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: start_copy_blob_from_url_and_abort_copy") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: start_copy_blob_from_url_and_abort_copy" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -214,7 +235,7 @@ def start_copy_blob_from_url_and_abort_copy(self): # [START copy_blob_from_url] # Get the blob client with the source blob source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = blob_service_client.get_blob_client("copyblobcontainer", '59466-0.txt') + copied_blob = blob_service_client.get_blob_client("copyblobcontainer", "59466-0.txt") # start copy and check copy status copy = copied_blob.start_copy_from_url(source_blob) @@ -239,7 +260,8 @@ def start_copy_blob_from_url_and_abort_copy(self): finally: blob_service_client.delete_container("copyblobcontainer") -if __name__ == '__main__': + +if __name__ == "__main__": sample = CommonBlobSamples() sample.blob_snapshots() sample.soft_delete_and_undelete_blob() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py index 18f6343d3713..be57ddf47044 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py @@ -23,22 +23,26 @@ from azure.core.exceptions import ResourceExistsError -SOURCE_FILE = './SampleSource.txt' +SOURCE_FILE = "./SampleSource.txt" class CommonBlobSamplesAsync(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") - #--Begin Blob Samples----------------------------------------------------------------- + # --Begin Blob Samples----------------------------------------------------------------- async def blob_snapshots_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_snapshots_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: blob_snapshots_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -63,7 +67,7 @@ async def blob_snapshots_async(self): snapshot_blob = await blob_client.create_snapshot() # Get the snapshot ID - print(snapshot_blob.get('snapshot')) + print(snapshot_blob.get("snapshot")) # Delete only the snapshot (blob itself is retained) await blob_client.delete_blob(delete_snapshots="only") @@ -74,16 +78,21 @@ async def blob_snapshots_async(self): async def soft_delete_and_undelete_blob_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: soft_delete_and_undelete_blob_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: soft_delete_and_undelete_blob_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: # Create a retention policy to retain deleted blobs from azure.storage.blob import RetentionPolicy + delete_retention_policy = RetentionPolicy(enabled=True, days=1) # Set the retention policy on the service @@ -120,11 +129,15 @@ async def soft_delete_and_undelete_blob_async(self): async def delete_multiple_blobs_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: delete_multiple_blobs_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: delete_multiple_blobs_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -150,7 +163,9 @@ async def delete_multiple_blobs_async(self): # Delete multiple blobs by properties iterator my_blobs = container_client.list_blobs(name_starts_with="my_blob") - await container_client.delete_blobs(*[b async for b in my_blobs]) # async for in list comprehension after 3.6 only + await container_client.delete_blobs( + *[b async for b in my_blobs] + ) # async for in list comprehension after 3.6 only # [END delete_multiple_blobs] # Delete container @@ -158,11 +173,15 @@ async def delete_multiple_blobs_async(self): async def acquire_lease_on_blob_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: acquire_lease_on_blob_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: acquire_lease_on_blob_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -195,11 +214,15 @@ async def acquire_lease_on_blob_async(self): async def start_copy_blob_from_url_and_abort_copy_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: start_copy_blob_from_url_and_abort_copy_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: start_copy_blob_from_url_and_abort_copy_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -216,7 +239,7 @@ async def start_copy_blob_from_url_and_abort_copy_async(self): # [START copy_blob_from_url] # Get the blob client with the source blob source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = blob_service_client.get_blob_client("copyblobcontainerasync", '59466-0.txt') + copied_blob = blob_service_client.get_blob_client("copyblobcontainerasync", "59466-0.txt") # start copy and check copy status copy = await copied_blob.start_copy_from_url(source_blob) @@ -241,6 +264,7 @@ async def start_copy_blob_from_url_and_abort_copy_async(self): finally: await blob_service_client.delete_container("copyblobcontainerasync") + async def main(): sample = CommonBlobSamplesAsync() await sample.blob_snapshots_async() @@ -249,5 +273,6 @@ async def main(): await sample.acquire_lease_on_blob_async() await sample.start_copy_blob_from_url_and_abort_copy_async() -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py b/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py index c90a8323a128..406521f90af1 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py @@ -36,11 +36,12 @@ from azure.storage.blob import AccessPolicy, BlobServiceClient, ContainerSasPermissions, PublicAccess try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") sys.exit(1) + def get_and_set_container_access_policy(): service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING) container_client = service_client.get_container_client("mynewconwertainer") @@ -49,11 +50,13 @@ def get_and_set_container_access_policy(): container_client.create_container() # Create access policy - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True, write=True), - expiry=datetime.utcnow() + timedelta(hours=1), - start=datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True, write=True), + expiry=datetime.utcnow() + timedelta(hours=1), + start=datetime.utcnow() - timedelta(minutes=1), + ) - identifiers = {'read': access_policy} + identifiers = {"read": access_policy} # Specifies full public read access for container and blob data. 
public_access = PublicAccess.CONTAINER @@ -72,7 +75,7 @@ def get_and_set_container_access_policy(): print("\n..Getting container access policy") access_policy_dict = container_client.get_container_access_policy() print(f"Blob Access Type: {access_policy_dict['public_access']}") - for identifier in access_policy_dict['signed_identifiers']: + for identifier in access_policy_dict["signed_identifiers"]: print(f"Identifier '{identifier.id}' has permissions '{identifier.access_policy.permission}'") diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py index 28f23e475ccc..728759f69c58 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py @@ -37,11 +37,12 @@ from azure.storage.blob.aio import BlobServiceClient try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") sys.exit(1) + async def get_and_set_container_access_policy(): service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING) container_client = service_client.get_container_client("mynewcontainer") @@ -54,11 +55,13 @@ async def get_and_set_container_access_policy(): pass # Create access policy - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True, write=True), - expiry=datetime.utcnow() + timedelta(hours=1), - start=datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True, write=True), + expiry=datetime.utcnow() + timedelta(hours=1), + start=datetime.utcnow() - timedelta(minutes=1), + ) - identifiers = {'read': access_policy} + identifiers = {"read": access_policy} # Specifies full public read access for container and blob data.
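One caveat that applies to both the sync and async versions of this sample: they build the policy window from the naive datetime.utcnow(), which is deprecated as of Python 3.12. A minimal sketch of the timezone-aware equivalent, assuming the same one-hour window:

from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccessPolicy, ContainerSasPermissions

# timezone-aware replacement for the deprecated datetime.utcnow()
now = datetime.now(timezone.utc)
access_policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True, write=True),
    expiry=now + timedelta(hours=1),
    start=now - timedelta(minutes=1),
)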
public_access = PublicAccess.CONTAINER @@ -77,7 +80,7 @@ async def get_and_set_container_access_policy(): print("\n..Getting container access policy") access_policy_dict = await container_client.get_container_access_policy() print(f"Blob Access Type: {access_policy_dict['public_access']}") - for identifier in access_policy_dict['signed_identifiers']: + for identifier in access_policy_dict["signed_identifiers"]: print(f"Identifier '{identifier.id}' has permissions '{identifier.access_policy.permission}'") @@ -88,5 +91,6 @@ async def main(): print(error) sys.exit(1) -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py b/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py index d8343d819f86..72492c8b3db9 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py @@ -23,24 +23,28 @@ import asyncio from azure.core.exceptions import ResourceExistsError -SOURCE_FILE = 'SampleSource.txt' +SOURCE_FILE = "SampleSource.txt" class ContainerSamples(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") - #--Begin Blob Samples----------------------------------------------------------------- + # --Begin Blob Samples----------------------------------------------------------------- def container_sample(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: container_sample" + ) sys.exit(1) # [START create_container_client_from_service] # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -70,12 +74,16 @@ def container_sample(self): def acquire_lease_on_container(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: acquire_lease_on_container") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: acquire_lease_on_container" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -97,12 +105,16 @@ def acquire_lease_on_container(self): def set_metadata_on_container(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: set_metadata_on_container") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING."
+ + "\n" + + "Test: set_metadata_on_container" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -114,7 +126,7 @@ def set_metadata_on_container(self): # [START set_container_metadata] # Create key, value pairs for metadata - metadata = {'type': 'test'} + metadata = {"type": "test"} # Set metadata on the container container_client.set_container_metadata(metadata=metadata) @@ -129,11 +141,15 @@ def set_metadata_on_container(self): def container_access_policy(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_access_policy") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: container_access_policy" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -146,11 +162,14 @@ def container_access_policy(self): # [START set_container_access_policy] # Create access policy from azure.storage.blob import AccessPolicy, ContainerSasPermissions - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1), - start=datetime.utcnow() - timedelta(minutes=1)) - identifiers = {'test': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + start=datetime.utcnow() - timedelta(minutes=1), + ) + + identifiers = {"test": access_policy} # Set the access policy on the container container_client.set_container_access_policy(signed_identifiers=identifiers) @@ -168,16 +187,16 @@ def container_access_policy(self): container_client.account_name, container_client.container_name, account_key=container_client.credential.account_key, - policy_id='my-access-policy-id' + policy_id="my-access-policy-id", ) # [END generate_sas_token] # Use the sas token to authenticate a new client # [START create_container_client_sastoken] from azure.storage.blob import ContainerClient + container = ContainerClient.from_container_url( - container_url="https://account.blob.core.windows.net/mycontainer", - credential=sas_token + container_url="https://account.blob.core.windows.net/mycontainer", credential=sas_token ) # [END create_container_client_sastoken] @@ -187,12 +206,16 @@ def container_access_policy(self): def list_blobs_in_container(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: list_blobs_in_container") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: list_blobs_in_container" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -211,7 +234,7 @@ def list_blobs_in_container(self): # [START list_blobs_in_container] blobs_list = container_client.list_blobs() for blob in blobs_list: - print(blob.name + '\n') + print(blob.name + "\n") # [END list_blobs_in_container] # Delete container @@ -219,12 +242,16 @@ def list_blobs_in_container(self): def get_blob_client_from_container(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_client_from_container") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_blob_client_from_container" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -246,11 +273,15 @@ def get_blob_client_from_container(self): def get_container_client_from_blob_client(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_container_client_from_blob_client") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_container_client_from_blob_client" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START get_container_client_from_blob_client] @@ -266,7 +297,7 @@ def get_container_client_from_blob_client(self): # [END get_container_client_from_blob_client] -if __name__ == '__main__': +if __name__ == "__main__": sample = ContainerSamples() sample.container_sample() sample.acquire_lease_on_container() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py index c19e685ba4b0..d64ff2d7b601 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py @@ -22,7 +22,8 @@ import asyncio from datetime import datetime, timedelta -SOURCE_FILE = 'SampleSource.txt' +SOURCE_FILE = "SampleSource.txt" + class ContainerSamplesAsync(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") @@ -31,13 +32,17 @@ class ContainerSamplesAsync(object): async def container_sample_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_sample_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: container_sample_async" + ) sys.exit(1) # [START create_container_client_from_service] # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a ContainerClient @@ -48,7 +53,9 @@ async def container_sample_async(self): # [START create_container_client_sasurl] from azure.storage.blob.aio import ContainerClient - sas_url = sas_url = "https://account.blob.core.windows.net/mycontainer?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D" + sas_url = sas_url = ( + "https://account.blob.core.windows.net/mycontainer?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D" + ) container = ContainerClient.from_container_url(sas_url) # [END create_container_client_sasurl] @@ -68,12 +75,16 @@ async def container_sample_async(self): async def acquire_lease_on_container_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: acquire_lease_on_container_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: acquire_lease_on_container_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -93,12 +104,16 @@ async def acquire_lease_on_container_async(self): async def set_metadata_on_container_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: set_metadata_on_container_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: set_metadata_on_container_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -111,7 +126,7 @@ async def set_metadata_on_container_async(self): # [START set_container_metadata] # Create key, value pairs for metadata - metadata = {'type': 'test'} + metadata = {"type": "test"} # Set metadata on the container await container_client.set_container_metadata(metadata=metadata) @@ -126,11 +141,15 @@ async def set_metadata_on_container_async(self): async def container_access_policy_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_access_policy_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: container_access_policy_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -144,11 +163,14 @@ async def container_access_policy_async(self): # [START set_container_access_policy] # Create access policy from azure.storage.blob import AccessPolicy, ContainerSasPermissions - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1), - start=datetime.utcnow() - timedelta(minutes=1)) - identifiers = {'my-access-policy-id': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + start=datetime.utcnow() - timedelta(minutes=1), + ) + + identifiers = {"my-access-policy-id": access_policy} # Set the access policy on the container await container_client.set_container_access_policy(signed_identifiers=identifiers) @@ -166,13 +188,14 @@ async def container_access_policy_async(self): container_client.account_name, container_client.container_name, account_key=container_client.credential.account_key, - policy_id='my-access-policy-id' + policy_id="my-access-policy-id", ) # [END generate_sas_token] # Use the sas token to authenticate a new client # [START create_container_client_sastoken] from azure.storage.blob.aio import ContainerClient + container = ContainerClient.from_container_url( container_url="https://account.blob.core.windows.net/mycontainerasync", credential=sas_token, @@ -185,12 +208,16 @@ async def container_access_policy_async(self): async def list_blobs_in_container_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: list_blobs_in_container_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: list_blobs_in_container_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -218,12 +245,16 @@ async def list_blobs_in_container_async(self): async def get_blob_client_from_container_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_client_from_container_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_blob_client_from_container_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -243,11 +274,15 @@ async def get_blob_client_from_container_async(self): async def get_container_client_from_blob_client_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_container_client_from_blob_client_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: get_container_client_from_blob_client_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -274,5 +309,6 @@ async def main(): await sample.get_blob_client_from_container_async() await sample.get_container_client_from_blob_client_async() -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob.py b/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob.py index e970f10c98ee..af4c2bebfd4e 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob.py @@ -21,9 +21,10 @@ import time from azure.storage.blob import BlobServiceClient + def main(): try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") @@ -32,7 +33,7 @@ def main(): status = None blob_service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING) source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = blob_service_client.get_blob_client("mycontainer", '59466-0.txt') + copied_blob = blob_service_client.get_blob_client("mycontainer", "59466-0.txt") # Copy started copied_blob.start_copy_from_url(source_blob) for i in range(10): @@ -60,5 +61,6 @@ def main(): props = copied_blob.get_blob_properties() print(props.copy.status) + if __name__ == "__main__": main() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob_async.py index ca4e21a416a4..8fbe5f12546b 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_copy_blob_async.py @@ -22,9 +22,10 @@ import time from azure.storage.blob.aio import BlobServiceClient + async def main(): try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") @@ -34,7 +35,7 @@ async def main(): blob_service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING) async with blob_service_client: source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = blob_service_client.get_blob_client("mycontainer", '59466-0.txt') + copied_blob = blob_service_client.get_blob_client("mycontainer", "59466-0.txt") # Copy started" await copied_blob.start_copy_from_url(source_blob) for i in range(10): @@ -62,5 +63,6 @@ async def main(): props = await copied_blob.get_blob_properties() print(props.copy.status) + if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py b/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py index 4c29216a54dd..d0565d2f24d7 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py @@ -6,7 +6,7 @@ # license information. 
# -------------------------------------------------------------------------- -''' +""" FILE: blob_samples_directory_interface.py DESCRIPTION: This example shows how to perform common filesystem-like operations on a @@ -23,180 +23,179 @@ USAGE: python blob_samples_directory_interface.py CONTAINER_NAME Set the environment variables with your own values before running the sample: 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account -''' +""" import os from azure.storage.blob import BlobServiceClient + class DirectoryClient: - def __init__(self, connection_string, container_name): - service_client = BlobServiceClient.from_connection_string(connection_string) - self.client = service_client.get_container_client(container_name) - - def upload(self, source, dest): - ''' - Upload a file or directory to a path inside the container - ''' - if (os.path.isdir(source)): - self.upload_dir(source, dest) - else: - self.upload_file(source, dest) - - def upload_file(self, source, dest): - ''' - Upload a single file to a path inside the container - ''' - print(f'Uploading {source} to {dest}') - with open(source, 'rb') as data: - self.client.upload_blob(name=dest, data=data) - - def upload_dir(self, source, dest): - ''' - Upload a directory to a path inside the container - ''' - prefix = '' if dest == '' else dest + '/' - prefix += os.path.basename(source) + '/' - for root, dirs, files in os.walk(source): - for name in files: - dir_part = os.path.relpath(root, source) - dir_part = '' if dir_part == '.' else dir_part + '/' - file_path = os.path.join(root, name) - blob_path = prefix + dir_part + name - self.upload_file(file_path, blob_path) - - def download(self, source, dest): - ''' - Download a file or directory to a path on the local filesystem - ''' - if not dest: - raise Exception('A destination must be provided') - - blobs = self.ls_files(source, recursive=True) - if blobs: - # if source is a directory, dest must also be a directory - if not source == '' and not source.endswith('/'): - source += '/' - if not dest.endswith('/'): - dest += '/' - # append the directory name from source to the destination - dest += os.path.basename(os.path.normpath(source)) + '/' - - blobs = [source + blob for blob in blobs] - for blob in blobs: - blob_dest = dest + os.path.relpath(blob, source) - self.download_file(blob, blob_dest) - else: - self.download_file(source, dest) - - def download_file(self, source, dest): - ''' - Download a single file to a path on the local filesystem - ''' - # dest is a directory if ending with '/' or '.', otherwise it's a file - if dest.endswith('.'): - dest += '/' - blob_dest = dest + os.path.basename(source) if dest.endswith('/') else dest - - print(f'Downloading {source} to {blob_dest}') - os.makedirs(os.path.dirname(blob_dest), exist_ok=True) - bc = self.client.get_blob_client(blob=source) - if not dest.endswith('/'): - with open(blob_dest, 'wb') as file: - data = bc.download_blob() - file.write(data.readall()) - - def ls_files(self, path, recursive=False): - ''' - List files under a path, optionally recursively - ''' - if not path == '' and not path.endswith('/'): - path += '/' - - blob_iter = self.client.list_blobs(name_starts_with=path) - files = [] - for blob in blob_iter: - relative_path = os.path.relpath(blob.name, path) - if recursive or not '/' in relative_path: - files.append(relative_path) - return files - - def ls_dirs(self, path, recursive=False): - ''' - List directories under a path, optionally recursively - ''' - if not path == '' and not 
path.endswith('/'): - path += '/' - - blob_iter = self.client.list_blobs(name_starts_with=path) - dirs = [] - for blob in blob_iter: - relative_dir = os.path.dirname(os.path.relpath(blob.name, path)) - if relative_dir and (recursive or not '/' in relative_dir) and not relative_dir in dirs: - dirs.append(relative_dir) - - return dirs - - def rm(self, path, recursive=False): - ''' - Remove a single file, or remove a path recursively - ''' - if recursive: - self.rmdir(path) - else: - print(f'Deleting {path}') - self.client.delete_blob(path) - - def rmdir(self, path): - ''' - Remove a directory and its contents recursively - ''' - blobs = self.ls_files(path, recursive=True) - if not blobs: - return - - if not path == '' and not path.endswith('/'): - path += '/' - blobs = [path + blob for blob in blobs] - print(f'Deleting {", ".join(blobs)}') - self.client.delete_blobs(*blobs) + def __init__(self, connection_string, container_name): + service_client = BlobServiceClient.from_connection_string(connection_string) + self.client = service_client.get_container_client(container_name) + + def upload(self, source, dest): + """ + Upload a file or directory to a path inside the container + """ + if os.path.isdir(source): + self.upload_dir(source, dest) + else: + self.upload_file(source, dest) + + def upload_file(self, source, dest): + """ + Upload a single file to a path inside the container + """ + print(f"Uploading {source} to {dest}") + with open(source, "rb") as data: + self.client.upload_blob(name=dest, data=data) + + def upload_dir(self, source, dest): + """ + Upload a directory to a path inside the container + """ + prefix = "" if dest == "" else dest + "/" + prefix += os.path.basename(source) + "/" + for root, dirs, files in os.walk(source): + for name in files: + dir_part = os.path.relpath(root, source) + dir_part = "" if dir_part == "." 
else dir_part + "/" + file_path = os.path.join(root, name) + blob_path = prefix + dir_part + name + self.upload_file(file_path, blob_path) + + def download(self, source, dest): + """ + Download a file or directory to a path on the local filesystem + """ + if not dest: + raise Exception("A destination must be provided") + + blobs = self.ls_files(source, recursive=True) + if blobs: + # if source is a directory, dest must also be a directory + if not source == "" and not source.endswith("/"): + source += "/" + if not dest.endswith("/"): + dest += "/" + # append the directory name from source to the destination + dest += os.path.basename(os.path.normpath(source)) + "/" + + blobs = [source + blob for blob in blobs] + for blob in blobs: + blob_dest = dest + os.path.relpath(blob, source) + self.download_file(blob, blob_dest) + else: + self.download_file(source, dest) + + def download_file(self, source, dest): + """ + Download a single file to a path on the local filesystem + """ + # dest is a directory if ending with '/' or '.', otherwise it's a file + if dest.endswith("."): + dest += "/" + blob_dest = dest + os.path.basename(source) if dest.endswith("/") else dest + + print(f"Downloading {source} to {blob_dest}") + os.makedirs(os.path.dirname(blob_dest), exist_ok=True) + bc = self.client.get_blob_client(blob=source) + if not dest.endswith("/"): + with open(blob_dest, "wb") as file: + data = bc.download_blob() + file.write(data.readall()) + + def ls_files(self, path, recursive=False): + """ + List files under a path, optionally recursively + """ + if not path == "" and not path.endswith("/"): + path += "/" + + blob_iter = self.client.list_blobs(name_starts_with=path) + files = [] + for blob in blob_iter: + relative_path = os.path.relpath(blob.name, path) + if recursive or not "/" in relative_path: + files.append(relative_path) + return files + + def ls_dirs(self, path, recursive=False): + """ + List directories under a path, optionally recursively + """ + if not path == "" and not path.endswith("/"): + path += "/" + + blob_iter = self.client.list_blobs(name_starts_with=path) + dirs = [] + for blob in blob_iter: + relative_dir = os.path.dirname(os.path.relpath(blob.name, path)) + if relative_dir and (recursive or not "/" in relative_dir) and not relative_dir in dirs: + dirs.append(relative_dir) + + return dirs + + def rm(self, path, recursive=False): + """ + Remove a single file, or remove a path recursively + """ + if recursive: + self.rmdir(path) + else: + print(f"Deleting {path}") + self.client.delete_blob(path) + + def rmdir(self, path): + """ + Remove a directory and its contents recursively + """ + blobs = self.ls_files(path, recursive=True) + if not blobs: + return + + if not path == "" and not path.endswith("/"): + path += "/" + blobs = [path + blob for blob in blobs] + print(f'Deleting {", ".join(blobs)}') + self.client.delete_blobs(*blobs) + # Sample setup import sys + try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: - print('AZURE_STORAGE_CONNECTION_STRING must be set') - sys.exit(1) + print("AZURE_STORAGE_CONNECTION_STRING must be set") + sys.exit(1) try: - CONTAINER_NAME = sys.argv[1] + CONTAINER_NAME = sys.argv[1] except IndexError: - print('usage: directory_interface.py CONTAINER_NAME') - print('error: the following arguments are required: CONTAINER_NAME') - sys.exit(1) - -SAMPLE_DIRS = [ - 'cats/calico', - 'cats/siamese', - 'cats/tabby' -] + print("usage: 
directory_interface.py CONTAINER_NAME") + print("error: the following arguments are required: CONTAINER_NAME") + sys.exit(1) + +SAMPLE_DIRS = ["cats/calico", "cats/siamese", "cats/tabby"] SAMPLE_FILES = [ - 'readme.txt', - 'cats/herds.txt', - 'cats/calico/anna.txt', - 'cats/calico/felix.txt', - 'cats/siamese/mocha.txt', - 'cats/tabby/bojangles.txt' + "readme.txt", + "cats/herds.txt", + "cats/calico/anna.txt", + "cats/calico/felix.txt", + "cats/siamese/mocha.txt", + "cats/tabby/bojangles.txt", ] for path in SAMPLE_DIRS: - os.makedirs(path, exist_ok=True) + os.makedirs(path, exist_ok=True) for path in SAMPLE_FILES: - with open(path, 'w') as file: - file.write('content') + with open(path, "w") as file: + file.write("content") # Sample body @@ -208,8 +207,8 @@ def rmdir(self, path): # After this call, the container will look like: # cat-herding/ # readme.txt -client.upload('readme.txt', 'cat-herding/readme.txt') -files = client.ls_files('', recursive=True) +client.upload("readme.txt", "cat-herding/readme.txt") +files = client.ls_files("", recursive=True) print(files) # Upload a directory to the container with a path prefix. The directory @@ -227,14 +226,14 @@ def rmdir(self, path): # mocha.txt # tabby/ # bojangles.txt -client.upload('cats', 'cat-herding') -files = client.ls_files('', recursive=True) +client.upload("cats", "cat-herding") +files = client.ls_files("", recursive=True) print(files) # List files in a single directory # Returns: # ['herds.txt'] -files = client.ls_files('cat-herding/cats') +files = client.ls_files("cat-herding/cats") print(files) # List files in a directory recursively @@ -246,19 +245,19 @@ def rmdir(self, path): # 'siamese/mocha.txt', # 'tabby/bojangles.txt' # ] -files = client.ls_files('cat-herding/cats', recursive=True) +files = client.ls_files("cat-herding/cats", recursive=True) print(files) # List directories in a single directory # Returns: # ['calico', 'siamese', 'tabby'] -dirs = client.ls_dirs('cat-herding/cats') +dirs = client.ls_dirs("cat-herding/cats") print(dirs) # List files in a directory recursively # Returns: # ['cats', 'cats/calico', 'cats/siamese', 'cats/tabby'] -dirs = client.ls_dirs('cat-herding', recursive=True) +dirs = client.ls_dirs("cat-herding", recursive=True) print(dirs) # Download a single file to a location on disk, specifying the destination file @@ -270,9 +269,10 @@ def rmdir(self, path): # After this call, your working directory will look like: # downloads/ # cat-info.txt -client.download('cat-herding/readme.txt', 'downloads/cat-info.txt') +client.download("cat-herding/readme.txt", "downloads/cat-info.txt") import glob -print(glob.glob('downloads/**', recursive=True)) + +print(glob.glob("downloads/**", recursive=True)) # Download a single file to a folder on disk, preserving the original file name. # When the destination ends with a slash '/' or is a relative path specifier @@ -286,8 +286,8 @@ def rmdir(self, path): # cat-info.txt # herd-info/ # herds.txt -client.download('cat-herding/cats/herds.txt', 'downloads/herd-info/') -print(glob.glob('downloads/**', recursive=True)) +client.download("cat-herding/cats/herds.txt", "downloads/herd-info/") +print(glob.glob("downloads/**", recursive=True)) # Download a directory to a folder on disk. The destination is always # interpreted as a directory name. 
The directory structure will be preserved @@ -309,8 +309,8 @@ def rmdir(self, path): # cat-info.txt # herd-info/ # herds.txt -client.download('cat-herding/cats', 'downloads/cat-data') -print(glob.glob('downloads/**', recursive=True)) +client.download("cat-herding/cats", "downloads/cat-data") +print(glob.glob("downloads/**", recursive=True)) # Delete a single file from the container # @@ -325,8 +325,8 @@ def rmdir(self, path): # mocha.txt # tabby/ # bojangles.txt -client.rm('cat-herding/cats/calico/felix.txt') -files = client.ls_files('', recursive=True) +client.rm("cat-herding/cats/calico/felix.txt") +files = client.ls_files("", recursive=True) print(files) # Delete files in a directory recursively. This is equivalent to @@ -335,21 +335,22 @@ def rmdir(self, path): # After this call, the container will look like: # cat-herding/ # readme.txt -client.rm('cat-herding/cats', recursive=True) -files = client.ls_files('', recursive=True) +client.rm("cat-herding/cats", recursive=True) +files = client.ls_files("", recursive=True) print(files) # Delete files in a directory recursively. This is equivalent to # client.rm('cat-herding', recursive=True) # # After this call, the container will be empty. -client.rmdir('cat-herding') -files = client.ls_files('', recursive=True) +client.rmdir("cat-herding") +files = client.ls_files("", recursive=True) print(files) # Sample cleanup import shutil -shutil.rmtree('downloads') -shutil.rmtree('cats') -os.remove('readme.txt') + +shutil.rmtree("downloads") +shutil.rmtree("cats") +os.remove("readme.txt") diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface_mimetype.py b/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface_mimetype.py index 5bb143de2fcf..7ab7cbe3664a 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface_mimetype.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface_mimetype.py @@ -6,7 +6,7 @@ # license information. # -------------------------------------------------------------------------- -''' +""" FILE: blob_samples_directory_interface_mimetype.py DESCRIPTION: This example is just an addition to 'blob_samples_directory_interface.py'.
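The override that follows boils down to guessing a MIME type from the file name and attaching it to the blob as its content type, so downloads report, for example, image/jpeg instead of the default application/octet-stream. A minimal standalone sketch, assuming an existing ContainerClient named container_client and a hypothetical local file photo.jpg:

import mimetypes

from azure.storage.blob import ContentSettings

source = "photo.jpg"  # hypothetical local file
content_type = mimetypes.guess_type(source)[0]  # e.g. "image/jpeg"

with open(source, "rb") as data:
    container_client.upload_blob(
        name="photos/photo.jpg",
        data=data,
        content_settings=ContentSettings(content_type=content_type),
    )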
@@ -15,7 +15,7 @@ USAGE: python blob_samples_directory_interface_mimetype.py CONTAINER_NAME Set the environment variables with your own values before running the sample: 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account -''' +""" import sys import shutil @@ -25,54 +25,51 @@ import os from azure.storage.blob import ContentSettings + class DirectoryClientEx(DirectoryClient): # overriding upload_file method def upload_file(self, source, dest): - ''' + """ Upload a single file to a path inside the container with content-type set - ''' + """ content_type = mimetypes.guess_type(source)[0] - print(f'Uploading {source} to {dest} (type: {content_type})') - with open(source, 'rb') as data: - self.client.upload_blob(name=dest, data=data, content_settings=ContentSettings( - content_type=content_type)) + print(f"Uploading {source} to {dest} (type: {content_type})") + with open(source, "rb") as data: + self.client.upload_blob(name=dest, data=data, content_settings=ContentSettings(content_type=content_type)) + # Sample setup try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: - print('AZURE_STORAGE_CONNECTION_STRING must be set') + print("AZURE_STORAGE_CONNECTION_STRING must be set") sys.exit(1) try: CONTAINER_NAME = sys.argv[1] except IndexError: - print('usage: directory_interface.py CONTAINER_NAME') - print('error: the following arguments are required: CONTAINER_NAME') + print("usage: directory_interface.py CONTAINER_NAME") + print("error: the following arguments are required: CONTAINER_NAME") sys.exit(1) -SAMPLE_DIRS = [ - 'cats/calico', - 'cats/siamese', - 'cats/tabby' -] +SAMPLE_DIRS = ["cats/calico", "cats/siamese", "cats/tabby"] SAMPLE_FILES = [ - 'readme.txt', - 'cats/herds.txt', - 'cats/calico/anna.txt', - 'cats/calico/felix.txt', - 'cats/siamese/mocha.txt', - 'cats/tabby/bojangles.txt' + "readme.txt", + "cats/herds.txt", + "cats/calico/anna.txt", + "cats/calico/felix.txt", + "cats/siamese/mocha.txt", + "cats/tabby/bojangles.txt", ] for path in SAMPLE_DIRS: os.makedirs(path, exist_ok=True) for path in SAMPLE_FILES: - with open(path, 'w') as file: - file.write('content') + with open(path, "w") as file: + file.write("content") # Sample body @@ -84,8 +81,8 @@ def upload_file(self, source, dest): # After this call, the container will look like: # cat-herding/ # readme.txt -client.upload('readme.txt', 'cat-herding/readme.txt') -files = client.ls_files('', recursive=True) +client.upload("readme.txt", "cat-herding/readme.txt") +files = client.ls_files("", recursive=True) print(files) # Upload a directory to the container with a path prefix. 
The directory @@ -103,14 +100,14 @@ def upload_file(self, source, dest): # mocha.txt # tabby/ # bojangles.txt -client.upload('cats', 'cat-herding') -files = client.ls_files('', recursive=True) +client.upload("cats", "cat-herding") +files = client.ls_files("", recursive=True) print(files) # List files in a single directory # Returns: # ['herds.txt'] -files = client.ls_files('cat-herding/cats') +files = client.ls_files("cat-herding/cats") print(files) # List files in a directory recursively @@ -122,19 +119,19 @@ def upload_file(self, source, dest): # 'siamese/mocha.txt', # 'tabby/bojangles.txt' # ] -files = client.ls_files('cat-herding/cats', recursive=True) +files = client.ls_files("cat-herding/cats", recursive=True) print(files) # List directories in a single directory # Returns: # ['calico', 'siamese', 'tabby'] -dirs = client.ls_dirs('cat-herding/cats') +dirs = client.ls_dirs("cat-herding/cats") print(dirs) # List directories in a directory recursively # Returns: # ['cats', 'cats/calico', 'cats/siamese', 'cats/tabby'] -dirs = client.ls_dirs('cat-herding', recursive=True) +dirs = client.ls_dirs("cat-herding", recursive=True) print(dirs) # Download a single file to a location on disk, specifying the destination file @@ -146,8 +143,8 @@ def upload_file(self, source, dest): # After this call, your working directory will look like: # downloads/ # cat-info.txt -client.download('cat-herding/readme.txt', 'downloads/cat-info.txt') -print(glob.glob('downloads/**', recursive=True)) +client.download("cat-herding/readme.txt", "downloads/cat-info.txt") +print(glob.glob("downloads/**", recursive=True)) # Download a single file to a folder on disk, preserving the original file name. # When the destination ends with a slash '/' or is a relative path specifier @@ -161,8 +158,8 @@ def upload_file(self, source, dest): # cat-info.txt # herd-info/ # herds.txt -client.download('cat-herding/cats/herds.txt', 'downloads/herd-info/') -print(glob.glob('downloads/**', recursive=True)) +client.download("cat-herding/cats/herds.txt", "downloads/herd-info/") +print(glob.glob("downloads/**", recursive=True)) # Download a directory to a folder on disk. The destination is always # interpreted as a directory name. The directory structure will be preserved @@ -184,8 +181,8 @@ def upload_file(self, source, dest): # cat-info.txt # herd-info/ # herds.txt -client.download('cat-herding/cats', 'downloads/cat-data') -print(glob.glob('downloads/**', recursive=True)) +client.download("cat-herding/cats", "downloads/cat-data") +print(glob.glob("downloads/**", recursive=True)) # Delete a single file from the container # @@ -200,8 +197,8 @@ def upload_file(self, source, dest): # mocha.txt # tabby/ # bojangles.txt -client.rm('cat-herding/cats/calico/felix.txt') -files = client.ls_files('', recursive=True) +client.rm("cat-herding/cats/calico/felix.txt") +files = client.ls_files("", recursive=True) print(files) # Delete files in a directory recursively. This is equivalent to @@ -210,20 +207,20 @@ def upload_file(self, source, dest): # After this call, the container will look like: # cat-herding/ # readme.txt -client.rm('cat-herding/cats', recursive=True) -files = client.ls_files('', recursive=True) +client.rm("cat-herding/cats", recursive=True) +files = client.ls_files("", recursive=True) print(files) # Delete files in a directory recursively. This is equivalent to # client.rm('cat-herding', recursive=True) # # After this call, the container will be empty.
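The rmdir call just below removes an entire virtual directory through ContainerClient.delete_blobs, which batches many deletions into a single request rather than one round trip per blob. A minimal sketch of that pattern on its own, assuming a ContainerClient named container_client:

# collect the blob names under a prefix, then delete them in one batch
to_delete = [b.name for b in container_client.list_blobs(name_starts_with="cat-herding/")]
if to_delete:
    container_client.delete_blobs(*to_delete)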
-client.rmdir('cat-herding') -files = client.ls_files('', recursive=True) +client.rmdir("cat-herding") +files = client.ls_files("", recursive=True) print(files) # Sample cleanup -shutil.rmtree('downloads') -shutil.rmtree('cats') -os.remove('readme.txt') +shutil.rmtree("downloads") +shutil.rmtree("cats") +os.remove("readme.txt") diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs.py b/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs.py index 9c26cf709bab..7f4d75775d29 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs.py @@ -20,9 +20,10 @@ import sys from azure.storage.blob import ContainerClient + def main(): try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") @@ -32,7 +33,8 @@ def main(): blob_list = container.list_blobs() for blob in blob_list: - print(blob.name + '\n') + print(blob.name + "\n") + if __name__ == "__main__": main() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs_async.py index e98180583c57..1a8862104497 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs_async.py @@ -21,9 +21,10 @@ import asyncio from azure.storage.blob.aio import ContainerClient + async def main(): try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") sys.exit(1) @@ -32,7 +33,8 @@ async def main(): async with container: async for blob in container.list_blobs(): - print(blob.name + '\n') + print(blob.name + "\n") + if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py b/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py index a96d9a8d5880..e97ebb022afb 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py @@ -21,24 +21,28 @@ # set up -SOURCE_FILE = 'SampleSource.txt' -DEST_FILE = 'BlockDestination.txt' +SOURCE_FILE = "SampleSource.txt" +DEST_FILE = "BlockDestination.txt" class BlobSamples(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") - #--Begin Blob Samples----------------------------------------------------------------- + # --Begin Blob Samples----------------------------------------------------------------- def create_container_sample(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: create_container_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: create_container_sample" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a new ContainerClient @@ -57,12 +61,16 @@ def create_container_sample(self): def block_blob_sample(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: block_blob_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: block_blob_sample" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a new ContainerClient @@ -97,21 +105,26 @@ def block_blob_sample(self): def stream_block_blob(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: stream_block_blob") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: stream_block_blob" + ) sys.exit(1) import uuid + # Instantiate a new BlobServiceClient using a connection string - set chunk size to 1MB from azure.storage.blob import BlobServiceClient, BlobBlock - blob_service_client = BlobServiceClient.from_connection_string(self.connection_string, - max_single_get_size=1024*1024, - max_chunk_get_size=1024*1024) + + blob_service_client = BlobServiceClient.from_connection_string( + self.connection_string, max_single_get_size=1024 * 1024, max_chunk_get_size=1024 * 1024 + ) # Instantiate a new ContainerClient container_client = blob_service_client.get_container_client("containersync") # Generate 4MB of data - data = b'a'*4*1024*1024 + data = b"a" * 4 * 1024 * 1024 try: # Create new Container in the service @@ -146,12 +159,16 @@ def stream_block_blob(self): def page_blob_sample(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: page_blob_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: page_blob_sample" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a new ContainerClient @@ -165,7 +182,7 @@ def page_blob_sample(self): blob_client = container_client.get_blob_client("mypageblob") # Upload content to the Page Blob - data = b'abcd'*128 + data = b"abcd" * 128 blob_client.upload_blob(data, blob_type="PageBlob") # Download Page Blob @@ -182,12 +199,16 @@ def page_blob_sample(self): def append_blob_sample(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: append_blob_sample") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: append_blob_sample" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # Instantiate a new ContainerClient @@ -217,10 +238,10 @@ def append_blob_sample(self): container_client.delete_container() -if __name__ == '__main__': +if __name__ == "__main__": sample = BlobSamples() sample.create_container_sample() sample.block_blob_sample() sample.append_blob_sample() sample.page_blob_sample() - sample.stream_block_blob() \ No newline at end of file + sample.stream_block_blob() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py index 0fdff6f7845b..1744a3b174f4 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py @@ -21,24 +21,28 @@ import asyncio # set up -DEST_FILE = 'BlockDestination.txt' -SOURCE_FILE = 'SampleSource.txt' +DEST_FILE = "BlockDestination.txt" +SOURCE_FILE = "SampleSource.txt" class BlobSamplesAsync(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") - #--Begin Blob Samples----------------------------------------------------------------- + # --Begin Blob Samples----------------------------------------------------------------- async def create_container_sample_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: create_container_sample_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: create_container_sample_async" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -60,12 +64,16 @@ async def create_container_sample_async(self): async def block_blob_sample_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: block_blob_sample_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: block_blob_sample_async" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -102,23 +110,28 @@ async def block_blob_sample_async(self): async def stream_block_blob(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: stream_block_blob_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: stream_block_blob_async" + ) sys.exit(1) import uuid + # Instantiate a new BlobServiceClient using a connection string - set chunk size to 1MB from azure.storage.blob import BlobBlock from azure.storage.blob.aio import BlobServiceClient - blob_service_client = BlobServiceClient.from_connection_string(self.connection_string, - max_single_get_size=1024*1024, - max_chunk_get_size=1024*1024) + + blob_service_client = BlobServiceClient.from_connection_string( + self.connection_string, max_single_get_size=1024 * 1024, max_chunk_get_size=1024 * 1024 + ) async with blob_service_client: # Instantiate a new ContainerClient container_client = blob_service_client.get_container_client("containerasync") # Generate 4MB of data - data = b'a'*4*1024*1024 + data = b"a" * 4 * 1024 * 1024 try: # Create new Container in the service @@ -153,12 +166,16 @@ async def stream_block_blob(self): async def page_blob_sample_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: page_blob_sample_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: page_blob_sample_async" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -173,7 +190,7 @@ async def page_blob_sample_async(self): blob_client = container_client.get_blob_client("mypageblob") # Upload content to the Page Blob - data = b'abcd'*128 + data = b"abcd" * 128 await blob_client.upload_blob(data, blob_type="PageBlob") # Download Page Blob @@ -191,12 +208,16 @@ async def page_blob_sample_async(self): async def append_blob_sample_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: append_blob_sample_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: append_blob_sample_async" + ) sys.exit(1) # Instantiate a new BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -236,5 +257,6 @@ async def main(): await sample.page_blob_sample_async() await sample.stream_block_blob() -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_network_activity_logging.py b/sdk/storage/azure-storage-blob/samples/blob_samples_network_activity_logging.py index 8d894864d977..6108457451ad 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_network_activity_logging.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_network_activity_logging.py @@ -36,15 +36,15 @@ # Retrieve connection string from environment variables # and construct a blob service client. 
-connection_string = os.environ.get('AZURE_STORAGE_CONNECTION_STRING', None) +connection_string = os.environ.get("AZURE_STORAGE_CONNECTION_STRING", None) if not connection_string: - print('AZURE_STORAGE_CONNECTION_STRING required.') + print("AZURE_STORAGE_CONNECTION_STRING required.") sys.exit(1) service_client = BlobServiceClient.from_connection_string(connection_string) # Retrieve a compatible logger and add a handler to send the output to console (STDOUT). # Compatible loggers in this case include `azure` and `azure.storage`. -logger = logging.getLogger('azure.storage.blob') +logger = logging.getLogger("azure.storage.blob") logger.addHandler(logging.StreamHandler(stream=sys.stdout)) # Logging policy logs network activity at the DEBUG level. Set the level on the logger prior to the call. diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_proxy_configuration.py b/sdk/storage/azure-storage-blob/samples/blob_samples_proxy_configuration.py index b3102918efb5..8e9b1105df12 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_proxy_configuration.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_proxy_configuration.py @@ -29,24 +29,21 @@ from azure.storage.blob import BlobServiceClient # Retrieve connection string from environment variables -connection_string = os.environ.get('AZURE_STORAGE_CONNECTION_STRING', None) +connection_string = os.environ.get("AZURE_STORAGE_CONNECTION_STRING", None) if not connection_string: - print('AZURE_STORAGE_CONNECTION_STRING required.') + print("AZURE_STORAGE_CONNECTION_STRING required.") sys.exit(1) # configure logging -logger = logging.getLogger('azure') +logger = logging.getLogger("azure") logger.addHandler(logging.StreamHandler(stream=sys.stdout)) logger.setLevel(logging.DEBUG) # TODO: Update this with your actual proxy information. -http_proxy = 'http://10.10.1.10:1180' -https_proxy = 'http://user:password@10.10.1.10:1180/' +http_proxy = "http://10.10.1.10:1180" +https_proxy = "http://user:password@10.10.1.10:1180/" -proxies = { - 'http': http_proxy, - 'https': https_proxy -} +proxies = {"http": http_proxy, "https": https_proxy} # Construct the BlobServiceClient, including the customized configuration. service_client = BlobServiceClient.from_connection_string(connection_string, proxies=proxies) containers = list(service_client.list_containers(logging_enable=True)) @@ -54,8 +51,8 @@ # Alternatively, proxy settings can be set using environment variables, with no # custom configuration necessary.
-HTTP_PROXY_ENV_VAR = 'HTTP_PROXY' -HTTPS_PROXY_ENV_VAR = 'HTTPS_PROXY' +HTTP_PROXY_ENV_VAR = "HTTP_PROXY" +HTTPS_PROXY_ENV_VAR = "HTTPS_PROXY" os.environ[HTTPS_PROXY_ENV_VAR] = https_proxy service_client = BlobServiceClient.from_connection_string(connection_string) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_query.py b/sdk/storage/azure-storage-blob/samples/blob_samples_query.py index 0b1f19e49ba5..8e80efa4a32a 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_query.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_query.py @@ -21,7 +21,7 @@ def main(): try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") @@ -36,6 +36,7 @@ def main(): pass # [START query] errors = [] + def on_error(error): errors.append(error) @@ -46,9 +47,13 @@ def on_error(error): # select the second column of the csv file query_expression = "SELECT _2 from BlobStorage" - input_format = DelimitedTextDialect(delimiter=',', quotechar='"', lineterminator='\n', escapechar="", has_header=False) - output_format = DelimitedJsonDialect(delimiter='\n') - reader = blob_client.query_blob(query_expression, on_error=on_error, blob_format=input_format, output_format=output_format) + input_format = DelimitedTextDialect( + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=False + ) + output_format = DelimitedJsonDialect(delimiter="\n") + reader = blob_client.query_blob( + query_expression, on_error=on_error, blob_format=input_format, output_format=output_format + ) content = reader.readall() # [END query] print(content) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_service.py b/sdk/storage/azure-storage-blob/samples/blob_samples_service.py index 837ee89e3f4f..aa5538f66def 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_service.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_service.py @@ -18,33 +18,42 @@ import sys from azure.core.exceptions import ResourceNotFoundError, ResourceExistsError + class BlobServiceSamples(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") def get_storage_account_information(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_storage_account_information") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_storage_account_information" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START get_blob_service_account_info] account_info = blob_service_client.get_account_information() - print('Using Storage SKU: {}'.format(account_info['sku_name'])) + print("Using Storage SKU: {}".format(account_info["sku_name"])) # [END get_blob_service_account_info] def blob_service_properties(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_service_properties") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: blob_service_properties" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START set_blob_service_properties] @@ -52,15 +61,18 @@ def blob_service_properties(self): from azure.storage.blob import BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy # Create logging settings - logging = BlobAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + logging = BlobAnalyticsLogging( + read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Create metrics for requests statistics hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5)) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=5)) + minute_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Create CORS rules - cors_rule = CorsRule(['www.xyz.com'], ['GET']) + cors_rule = CorsRule(["www.xyz.com"], ["GET"]) cors = [cors_rule] # Set the service properties @@ -73,12 +85,16 @@ def blob_service_properties(self): def blob_service_stats(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_service_stats") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: blob_service_stats" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START get_blob_service_stats] @@ -87,12 +103,16 @@ def blob_service_stats(self): def container_operations(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_operations") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: container_operations" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) try: @@ -108,12 +128,12 @@ def container_operations(self): # List all containers all_containers = blob_service_client.list_containers(include_metadata=True) for container in all_containers: - print(container['name'], container['metadata']) + print(container["name"], container["metadata"]) # Filter results with name prefix - test_containers = blob_service_client.list_containers(name_starts_with='test-') + test_containers = blob_service_client.list_containers(name_starts_with="test-") for container in test_containers: - print(container['name'], container['metadata']) + print(container["name"], container["metadata"]) # [END bsc_list_containers] finally: @@ -127,12 +147,16 @@ def container_operations(self): def get_blob_and_container_clients(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_and_container_clients") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: get_blob_and_container_clients" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # [START bsc_get_container_client] @@ -162,12 +186,16 @@ def get_blob_and_container_clients(self): def get_blob_service_client_from_container_client(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_service_client_from_container_client") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_blob_service_client_from_container_client" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob import ContainerClient + container_client1 = ContainerClient.from_connection_string(self.connection_string, "container") container_client1.create_container() @@ -181,7 +209,7 @@ def get_blob_service_client_from_container_client(self): # [END get_blob_service_client_from_container_client] -if __name__ == '__main__': +if __name__ == "__main__": sample = BlobServiceSamples() sample.get_storage_account_information() sample.get_blob_and_container_clients() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py index 4915f8f83503..2e877547e204 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py @@ -20,34 +20,43 @@ import asyncio from azure.core.exceptions import ResourceNotFoundError, ResourceExistsError + class BlobServiceSamplesAsync(object): connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") async def get_storage_account_information_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_storage_account_information_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_storage_account_information_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: # [START get_blob_service_account_info] account_info = await blob_service_client.get_account_information() - print('Using Storage SKU: {}'.format(account_info['sku_name'])) + print("Using Storage SKU: {}".format(account_info["sku_name"])) # [END get_blob_service_account_info] async def blob_service_properties_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_service_properties_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: blob_service_properties_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -56,15 +65,20 @@ async def blob_service_properties_async(self): from azure.storage.blob import BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy # Create logging settings - logging = BlobAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + logging = BlobAnalyticsLogging( + read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Create metrics for requests statistics - hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5)) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=5)) + hour_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) + minute_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Create CORS rules - cors_rule = CorsRule(['www.xyz.com'], ['GET']) + cors_rule = CorsRule(["www.xyz.com"], ["GET"]) cors = [cors_rule] # Set the service properties @@ -77,12 +91,16 @@ async def blob_service_properties_async(self): async def blob_service_stats_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: blob_service_stats_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: blob_service_stats_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -92,12 +110,16 @@ async def blob_service_stats_async(self): async def container_operations_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: container_operations_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: container_operations_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -117,15 +139,15 @@ async def container_operations_async(self): all_containers.append(container) for container in all_containers: - print(container['name'], container['metadata']) + print(container["name"], container["metadata"]) # Filter results with name prefix test_containers = [] - async for name in blob_service_client.list_containers(name_starts_with='test-'): + async for name in blob_service_client.list_containers(name_starts_with="test-"): test_containers.append(name) for container in test_containers: - print(container['name'], container['metadata']) + print(container["name"], container["metadata"]) # [END bsc_list_containers] finally: @@ -139,12 +161,16 @@ async def container_operations_async(self): async def get_blob_and_container_clients_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_and_container_clients_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + + "\n" + + "Test: get_blob_and_container_clients_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import BlobServiceClient + blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) async with blob_service_client: @@ -180,11 +206,15 @@ async def get_blob_and_container_clients_async(self): async def get_blob_service_client_from_container_client_async(self): if self.connection_string is None: - print("Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." + '\n' + - "Test: get_blob_service_client_from_container_client_async") + print( + "Missing required environment variable: AZURE_STORAGE_CONNECTION_STRING." 
+ + "\n" + + "Test: get_blob_service_client_from_container_client_async" + ) sys.exit(1) # Instantiate a BlobServiceClient using a connection string from azure.storage.blob.aio import ContainerClient + container_client1 = ContainerClient.from_connection_string(self.connection_string, "container") await container_client1.create_container() @@ -209,5 +239,6 @@ async def main(): await sample.blob_service_stats_async() await sample.get_blob_service_client_from_container_client_async() -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy.py b/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy.py index 09efd04f4128..19fd571e5591 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy.py @@ -46,35 +46,38 @@ from azure.storage.blob import BlobPrefix try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") sys.exit(1) + def walk_container(client, container): container_client = client.get_container_client(container.name) - print('C: {}'.format(container.name)) + print("C: {}".format(container.name)) depth = 1 - separator = ' ' + separator = " " def walk_blob_hierarchy(prefix=""): nonlocal depth for item in container_client.walk_blobs(name_starts_with=prefix): - short_name = item.name[len(prefix):] + short_name = item.name[len(prefix) :] if isinstance(item, BlobPrefix): - print('F: ' + separator * depth + short_name) + print("F: " + separator * depth + short_name) depth += 1 walk_blob_hierarchy(prefix=item.name) depth -= 1 else: - message = 'B: ' + separator * depth + short_name - results = list(container_client.list_blobs(name_starts_with=item.name, include=['snapshots'])) + message = "B: " + separator * depth + short_name + results = list(container_client.list_blobs(name_starts_with=item.name, include=["snapshots"])) num_snapshots = len(results) - 1 if num_snapshots: message += " ({} snapshots)".format(num_snapshots) print(message) + walk_blob_hierarchy() + try: service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING) containers = service_client.list_containers() diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy_async.py b/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy_async.py index 54e2f78698d5..f34d5e1a3840 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy_async.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy_async.py @@ -45,37 +45,40 @@ from azure.storage.blob.aio import BlobServiceClient, BlobPrefix try: - CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"] except KeyError: print("AZURE_STORAGE_CONNECTION_STRING must be set.") sys.exit(1) + async def walk_container(client, container): container_client = client.get_container_client(container.name) - print('C: {}'.format(container.name)) + print("C: {}".format(container.name)) depth = 1 - separator = ' ' + separator = " " async def walk_blob_hierarchy(prefix=""): nonlocal depth async for item in container_client.walk_blobs(name_starts_with=prefix): - short_name = item.name[len(prefix):] + short_name = item.name[len(prefix) :] if 
isinstance(item, BlobPrefix): - print('F: ' + separator * depth + short_name) + print("F: " + separator * depth + short_name) depth += 1 await walk_blob_hierarchy(prefix=item.name) depth -= 1 else: - message = 'B: ' + separator * depth + short_name + message = "B: " + separator * depth + short_name snapshots = [] - async for snapshot in container_client.list_blobs(name_starts_with=item.name, include=['snapshots']): + async for snapshot in container_client.list_blobs(name_starts_with=item.name, include=["snapshots"]): snapshots.append(snapshot) num_snapshots = len(snapshots) - 1 if num_snapshots: message += " ({} snapshots)".format(num_snapshots) print(message) + await walk_blob_hierarchy() + async def main(): try: async with BlobServiceClient.from_connection_string(CONNECTION_STRING) as service_client: @@ -86,5 +89,6 @@ async def main(): print(error) sys.exit(1) -if __name__ == '__main__': + +if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/storage/azure-storage-blob/setup.py b/sdk/storage/azure-storage-blob/setup.py index b011968e8e1c..c7cd8b7d31a4 100644 --- a/sdk/storage/azure-storage-blob/setup.py +++ b/sdk/storage/azure-storage-blob/setup.py @@ -1,91 +1,71 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- - +# coding: utf-8 import os import re - from setuptools import setup, find_packages -# Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-storage-blob" -PACKAGE_PPRINT_NAME = "Azure Blob Storage" +PACKAGE_PPRINT_NAME = "Azure Storage Blob" # a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace('-', '/') - -# azure-storage v0.36.0 and prior are not compatible with this package -try: - import azure.storage - - try: - ver = azure.storage.__version__ - raise Exception( - f'This package is incompatible with azure-storage=={ver}. ' + - ' Uninstall it with "pip uninstall azure-storage".' 
- ) - except AttributeError: - pass -except ImportError: - pass +package_folder_path = PACKAGE_NAME.replace("-", "/") # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', - fd.read(), re.MULTILINE).group(1) +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: - raise RuntimeError('Cannot find version information') + raise RuntimeError("Cannot find version information") + setup( name=PACKAGE_NAME, version=version, - include_package_data=True, - description=f'Microsoft {PACKAGE_PPRINT_NAME} Client Library for Python', - long_description=open('README.md', 'r').read(), - long_description_content_type='text/markdown', - license='MIT License', - author='Microsoft Corporation', - author_email='ascl@microsoft.com', - url='https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob', + description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", keywords="azure, azure sdk", classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'License :: OSI Approved :: MIT License', + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", ], zip_safe=False, - packages=find_packages(exclude=[ - # Exclude packages that will be covered by PEP420 or nspkg - 'azure', - 'azure.storage', - 'tests', - 'tests.blob', - 'tests.common' - ]), - python_requires=">=3.8", + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + "azure", + "azure.storage", + ] + ), + include_package_data=True, + package_data={ + "azure.storage.blob": ["py.typed"], + }, install_requires=[ + "isodate>=0.6.1", "azure-core>=1.30.0", - "cryptography>=2.1.4", "typing-extensions>=4.6.0", - "isodate>=0.6.1" ], - extras_require={ - "aio": [ - "azure-core[aio]>=1.30.0", - ], - }, + python_requires=">=3.8", ) diff --git a/sdk/storage/azure-storage-blob/tests/avro/test_avro.py b/sdk/storage/azure-storage-blob/tests/avro/test_avro.py index 2b29c0995c10..3ae71b31bfe4 100644 --- a/sdk/storage/azure-storage-blob/tests/avro/test_avro.py +++ b/sdk/storage/azure-storage-blob/tests/avro/test_avro.py @@ -1,4 +1,3 @@ - # coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
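The hunks that follow round-trip each (schema, datum) pair through the SDK-internal Avro reader. A minimal sketch of that read path, assuming the DataFileReader import path and one of the pre-generated sample file names these tests use:

from azure.storage.blob._shared.avro.avro_io import DatumReader
from azure.storage.blob._shared.avro.datafile import DataFileReader

# Open a sample Avro file from the tests' samples directory and decode
# every record it contains (file name is an assumption, following the
# test_<codec>_<index>.avro pattern used below).
with open("samples/test_null_0.avro", "rb") as fp:
    with DataFileReader(fp, DatumReader()) as dfr:
        records = list(dfr)  # each item is the decoded Python datum
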
@@ -14,30 +13,31 @@ from azure.storage.blob._shared.avro.avro_io import DatumReader SCHEMAS_TO_VALIDATE = ( - ('"null"', None), - ('"boolean"', True), - ('"string"', 'adsfasdf09809dsf-=adsf'), - ('"bytes"', b'12345abcd'), - ('"int"', 1234), - ('"long"', 1234), - ('"float"', 1234.0), - ('"double"', 1234.0), - ('{"type": "fixed", "name": "Test", "size": 1}', b'B'), - ('{"type": "enum", "name": "Test", "symbols": ["A", "B"]}', 'B'), - ('{"type": "array", "items": "long"}', [1, 3, 2]), - ('{"type": "map", "values": "long"}', {'a': 1, 'b': 3, 'c': 2}), - ('["string", "null", "long"]', None), - - (""" + ('"null"', None), + ('"boolean"', True), + ('"string"', "adsfasdf09809dsf-=adsf"), + ('"bytes"', b"12345abcd"), + ('"int"', 1234), + ('"long"', 1234), + ('"float"', 1234.0), + ('"double"', 1234.0), + ('{"type": "fixed", "name": "Test", "size": 1}', b"B"), + ('{"type": "enum", "name": "Test", "symbols": ["A", "B"]}', "B"), + ('{"type": "array", "items": "long"}', [1, 3, 2]), + ('{"type": "map", "values": "long"}', {"a": 1, "b": 3, "c": 2}), + ('["string", "null", "long"]', None), + ( + """ { "type": "record", "name": "Test", "fields": [{"name": "f", "type": "long"}] } """, - {'f': 5}), - - (""" + {"f": 5}, + ), + ( + """ { "type": "record", "name": "Lisp", @@ -56,78 +56,83 @@ }] } """, - {'value': {'car': {'value': 'head'}, 'cdr': {'value': None}}}), + {"value": {"car": {"value": "head"}, "cdr": {"value": None}}}, + ), ) -CODECS_TO_VALIDATE = ('null', 'deflate') +CODECS_TO_VALIDATE = ("null", "deflate") CHANGE_FEED_RECORD = { - 'data': { - 'api': 'PutBlob', - 'blobPropertiesUpdated': None, - 'blobType': 'BlockBlob', - 'clientRequestId': '75b6c460-fcd0-11e9-87e2-85def057dae9', - 'contentLength': 12, - 'contentType': 'text/plain', - 'etag': '0x8D75EF45A3B8617', - 'previousInfo': None, - 'requestId': 'bb219c8e-401e-0028-1fdd-90f393000000', - 'sequencer': '000000000000000000000000000017140000000000000fcc', - 'snapshot': None, - 'storageDiagnostics': {'bid': 'd3053fa1-a006-0042-00dd-902bbb000000', - 'seq': '(5908,134,4044,0)', - 'sid': '5aaf98bf-f1d8-dd76-2dd2-9b60c689538d'}, - 'url': ''}, - 'eventTime': '2019-11-01T17:53:07.5106080Z', - 'eventType': 'BlobCreated', - 'id': 'bb219c8e-401e-0028-1fdd-90f393069ae4', - 'schemaVersion': 3, - 'subject': '/blobServices/default/containers/test/blobs/sdf.txt', - 'topic': '/subscriptions/ba45b233-e2ef-4169-8808-49eb0d8eba0d/resourceGroups/XClient/providers/Microsoft.Storage/storageAccounts/seanchangefeedstage'} + "data": { + "api": "PutBlob", + "blobPropertiesUpdated": None, + "blobType": "BlockBlob", + "clientRequestId": "75b6c460-fcd0-11e9-87e2-85def057dae9", + "contentLength": 12, + "contentType": "text/plain", + "etag": "0x8D75EF45A3B8617", + "previousInfo": None, + "requestId": "bb219c8e-401e-0028-1fdd-90f393000000", + "sequencer": "000000000000000000000000000017140000000000000fcc", + "snapshot": None, + "storageDiagnostics": { + "bid": "d3053fa1-a006-0042-00dd-902bbb000000", + "seq": "(5908,134,4044,0)", + "sid": "5aaf98bf-f1d8-dd76-2dd2-9b60c689538d", + }, + "url": "", + }, + "eventTime": "2019-11-01T17:53:07.5106080Z", + "eventType": "BlobCreated", + "id": "bb219c8e-401e-0028-1fdd-90f393069ae4", + "schemaVersion": 3, + "subject": "/blobServices/default/containers/test/blobs/sdf.txt", + "topic": "/subscriptions/ba45b233-e2ef-4169-8808-49eb0d8eba0d/resourceGroups/XClient/providers/Microsoft.Storage/storageAccounts/seanchangefeedstage", +} class AvroReaderTests(unittest.TestCase): @classmethod def setUpClass(cls): test_file_path = inspect.getfile(cls) - 
cls._samples_dir_root = os.path.join(os.path.dirname(test_file_path), 'samples') + cls._samples_dir_root = os.path.join(os.path.dirname(test_file_path), "samples") def test_reader(self): correct = 0 nitems = 10 for iexample, (writer_schema, datum) in enumerate(SCHEMAS_TO_VALIDATE): for codec in CODECS_TO_VALIDATE: - file_path = os.path.join(AvroReaderTests._samples_dir_root, 'test_' + codec + '_' + str(iexample) + '.avro') - with open(file_path, 'rb') as reader: + file_path = os.path.join( + AvroReaderTests._samples_dir_root, "test_" + codec + "_" + str(iexample) + ".avro" + ) + with open(file_path, "rb") as reader: datum_reader = DatumReader() with DataFileReader(reader, datum_reader) as dfr: round_trip_data = list(dfr) if ([datum] * nitems) == round_trip_data: correct += 1 - self.assertEqual( - correct, - len(CODECS_TO_VALIDATE) * len(SCHEMAS_TO_VALIDATE)) + self.assertEqual(correct, len(CODECS_TO_VALIDATE) * len(SCHEMAS_TO_VALIDATE)) def test_reader_with_bytes_io(self): correct = 0 nitems = 10 for iexample, (writer_schema, datum) in enumerate(SCHEMAS_TO_VALIDATE): for codec in CODECS_TO_VALIDATE: - file_path = os.path.join(AvroReaderTests._samples_dir_root, 'test_' + codec + '_' + str(iexample) + '.avro') - with open(file_path, 'rb') as reader: + file_path = os.path.join( + AvroReaderTests._samples_dir_root, "test_" + codec + "_" + str(iexample) + ".avro" + ) + with open(file_path, "rb") as reader: data = BytesIO(reader.read()) datum_reader = DatumReader() with DataFileReader(data, datum_reader) as dfr: round_trip_data = list(dfr) if ([datum] * nitems) == round_trip_data: correct += 1 - self.assertEqual( - correct, - len(CODECS_TO_VALIDATE) * len(SCHEMAS_TO_VALIDATE)) + self.assertEqual(correct, len(CODECS_TO_VALIDATE) * len(SCHEMAS_TO_VALIDATE)) def test_change_feed(self): - file_path = os.path.join(AvroReaderTests._samples_dir_root, 'changeFeed.avro') - with open(file_path, 'rb') as reader: + file_path = os.path.join(AvroReaderTests._samples_dir_root, "changeFeed.avro") + with open(file_path, "rb") as reader: datum_reader = DatumReader() with DataFileReader(reader, datum_reader) as dfr: data = list(dfr) @@ -137,10 +142,10 @@ def test_change_feed(self): def test_with_hearder_reader(self): # Note: only when the data stream doesn't have header, we need header stream to help - file_path = os.path.join(AvroReaderTests._samples_dir_root, 'changeFeed.avro') + file_path = os.path.join(AvroReaderTests._samples_dir_root, "changeFeed.avro") # this data stream has header full_data_stream = BytesIO() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: full_data = reader.read() full_data_stream.write(full_data) # This initialization helps find the position after the first sync_marker @@ -149,12 +154,12 @@ def test_with_hearder_reader(self): # construct the partial data stream which doesn't have header partial_data_stream = _HeaderStream() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: reader.seek(position_after_sync_marker) partial_data_stream.write(reader.read()) header_stream = _HeaderStream() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: header_data = reader.read() header_stream.write(header_data) diff --git a/sdk/storage/azure-storage-blob/tests/avro/test_avro_async.py b/sdk/storage/azure-storage-blob/tests/avro/test_avro_async.py index 1273c659cecd..5dd1c4972d45 100644 --- a/sdk/storage/azure-storage-blob/tests/avro/test_avro_async.py +++ b/sdk/storage/azure-storage-blob/tests/avro/test_avro_async.py 
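The async tests below drive the reader through a small adapter that gives a plain binary file an awaitable interface. The real AsyncBufferedReaderWrapper is defined later in this file; the method set sketched here is an assumption about what the async reader needs:

class AsyncFileAdapter:
    """Wrap a synchronous binary file so async reader code can await it."""

    def __init__(self, sync_reader):
        self._reader = sync_reader

    async def read(self, size=-1):
        # Local file I/O in tests is cheap enough to delegate synchronously.
        return self._reader.read(size)

    async def seek(self, offset, whence=0):
        return self._reader.seek(offset, whence)

    def close(self):
        self._reader.close()
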
@@ -1,4 +1,3 @@ - # coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -17,31 +16,35 @@ from .test_avro import SCHEMAS_TO_VALIDATE -CODECS_TO_VALIDATE = ['null'] +CODECS_TO_VALIDATE = ["null"] CHANGE_FEED_RECORD = { - 'data': { - 'api': 'PutBlob', - 'blobPropertiesUpdated': None, - 'blobType': 'BlockBlob', - 'clientRequestId': '75b6c460-fcd0-11e9-87e2-85def057dae9', - 'contentLength': 12, - 'contentType': 'text/plain', - 'etag': '0x8D75EF45A3B8617', - 'previousInfo': None, - 'requestId': 'bb219c8e-401e-0028-1fdd-90f393000000', - 'sequencer': '000000000000000000000000000017140000000000000fcc', - 'snapshot': None, - 'storageDiagnostics': {'bid': 'd3053fa1-a006-0042-00dd-902bbb000000', - 'seq': '(5908,134,4044,0)', - 'sid': '5aaf98bf-f1d8-dd76-2dd2-9b60c689538d'}, - 'url': ''}, - 'eventTime': '2019-11-01T17:53:07.5106080Z', - 'eventType': 'BlobCreated', - 'id': 'bb219c8e-401e-0028-1fdd-90f393069ae4', - 'schemaVersion': 3, - 'subject': '/blobServices/default/containers/test/blobs/sdf.txt', - 'topic': '/subscriptions/ba45b233-e2ef-4169-8808-49eb0d8eba0d/resourceGroups/XClient/providers/Microsoft.Storage/storageAccounts/seanchangefeedstage'} + "data": { + "api": "PutBlob", + "blobPropertiesUpdated": None, + "blobType": "BlockBlob", + "clientRequestId": "75b6c460-fcd0-11e9-87e2-85def057dae9", + "contentLength": 12, + "contentType": "text/plain", + "etag": "0x8D75EF45A3B8617", + "previousInfo": None, + "requestId": "bb219c8e-401e-0028-1fdd-90f393000000", + "sequencer": "000000000000000000000000000017140000000000000fcc", + "snapshot": None, + "storageDiagnostics": { + "bid": "d3053fa1-a006-0042-00dd-902bbb000000", + "seq": "(5908,134,4044,0)", + "sid": "5aaf98bf-f1d8-dd76-2dd2-9b60c689538d", + }, + "url": "", + }, + "eventTime": "2019-11-01T17:53:07.5106080Z", + "eventType": "BlobCreated", + "id": "bb219c8e-401e-0028-1fdd-90f393069ae4", + "schemaVersion": 3, + "subject": "/blobServices/default/containers/test/blobs/sdf.txt", + "topic": "/subscriptions/ba45b233-e2ef-4169-8808-49eb0d8eba0d/resourceGroups/XClient/providers/Microsoft.Storage/storageAccounts/seanchangefeedstage", +} class AsyncBufferedReaderWrapper: @@ -62,7 +65,7 @@ class AvroReaderTestsAsync(unittest.TestCase): @classmethod def setUpClass(cls): test_file_path = inspect.getfile(cls) - cls._samples_dir_root = os.path.join(os.path.dirname(test_file_path), 'samples') + cls._samples_dir_root = os.path.join(os.path.dirname(test_file_path), "samples") @pytest.mark.asyncio async def test_reader(self): @@ -70,8 +73,10 @@ async def test_reader(self): nitems = 10 for iexample, (writer_schema, datum) in enumerate(SCHEMAS_TO_VALIDATE): for codec in CODECS_TO_VALIDATE: - file_path = os.path.join(AvroReaderTestsAsync._samples_dir_root, 'test_' + codec + '_' + str(iexample) + '.avro') - with open(file_path, 'rb') as reader: + file_path = os.path.join( + AvroReaderTestsAsync._samples_dir_root, "test_" + codec + "_" + str(iexample) + ".avro" + ) + with open(file_path, "rb") as reader: datum_reader = AsyncDatumReader() async_reader = AsyncBufferedReaderWrapper(reader) async with await AsyncDataFileReader(async_reader, datum_reader).init() as dfr: @@ -84,8 +89,8 @@ async def test_reader(self): @pytest.mark.asyncio async def test_change_feed(self): - file_path = os.path.join(AvroReaderTestsAsync._samples_dir_root, 'changeFeed.avro') - with open(file_path, 'rb') as reader: + file_path = os.path.join(AvroReaderTestsAsync._samples_dir_root, 
"changeFeed.avro") + with open(file_path, "rb") as reader: datum_reader = AsyncDatumReader() async_reader = AsyncBufferedReaderWrapper(reader) async with await AsyncDataFileReader(async_reader, datum_reader).init() as dfr: @@ -99,10 +104,10 @@ async def test_change_feed(self): @pytest.mark.asyncio async def test_with_header_reader(self): # Note: only when the data stream doesn't have header, we need header stream to help - file_path = os.path.join(AvroReaderTestsAsync._samples_dir_root, 'changeFeed.avro') + file_path = os.path.join(AvroReaderTestsAsync._samples_dir_root, "changeFeed.avro") # this data stream has header full_data_stream = _HeaderStream() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: full_data = reader.read() await full_data_stream.write(full_data) # This initialization helps find the position after the first sync_marker @@ -111,12 +116,12 @@ async def test_with_header_reader(self): # construct the partial data stream which doesn't have header partial_data_stream = _HeaderStream() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: reader.seek(position_after_sync_marker) await partial_data_stream.write(reader.read()) header_stream = _HeaderStream() - with open(file_path, 'rb') as reader: + with open(file_path, "rb") as reader: header_data = reader.read() await header_stream.write(header_data) @@ -128,6 +133,7 @@ async def test_with_header_reader(self): self.assertEqual(CHANGE_FEED_RECORD, records[0]) self.assertIsNot(partial_data_stream.object_position, 0) + class _HeaderStream(object): def __init__(self): self._bytes_stream = BytesIO() diff --git a/sdk/storage/azure-storage-blob/tests/conftest.py b/sdk/storage/azure-storage-blob/tests/conftest.py index 5925851aebb9..f34e814f1728 100644 --- a/sdk/storage/azure-storage-blob/tests/conftest.py +++ b/sdk/storage/azure-storage-blob/tests/conftest.py @@ -13,9 +13,10 @@ add_header_regex_sanitizer, add_oauth_response_sanitizer, add_uri_regex_sanitizer, - test_proxy + test_proxy, ) + @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") diff --git a/sdk/storage/azure-storage-blob/tests/encryption_test_helper.py b/sdk/storage/azure-storage-blob/tests/encryption_test_helper.py index 1221627327f8..40db3f1270a1 100644 --- a/sdk/storage/azure-storage-blob/tests/encryption_test_helper.py +++ b/sdk/storage/azure-storage-blob/tests/encryption_test_helper.py @@ -19,26 +19,26 @@ class KeyWrapper: - def __init__(self, kid='local:key1'): + def __init__(self, kid="local:key1"): # Must have constant key value for recorded tests, otherwise we could use a random generator. 
- self.kek = b'\xbe\xa4\x11K\x9eJ\x07\xdafF\x83\xad+\xadvA C\xe8\xbc\x90\xa4\x11}G\xc3\x0f\xd4\xb4\x19m\x11' + self.kek = b"\xbe\xa4\x11K\x9eJ\x07\xdafF\x83\xad+\xadvA C\xe8\xbc\x90\xa4\x11}G\xc3\x0f\xd4\xb4\x19m\x11" self.backend = default_backend() self.kid = kid - def wrap_key(self, key, algorithm='A256KW'): - if algorithm == 'A256KW': + def wrap_key(self, key, algorithm="A256KW"): + if algorithm == "A256KW": return aes_key_wrap(self.kek, key, self.backend) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) def unwrap_key(self, key, algorithm): - if algorithm == 'A256KW': + if algorithm == "A256KW": return aes_key_unwrap(self.kek, key, self.backend) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) def get_key_wrap_algorithm(self): - return 'A256KW' + return "A256KW" def get_kid(self): return self.kid @@ -56,37 +56,29 @@ def resolve_key(self, kid): class RSAKeyWrapper: - def __init__(self, kid='local:key2'): - self.private_key = generate_private_key(public_exponent=65537, - key_size=2048, - backend=default_backend()) + def __init__(self, kid="local:key2"): + self.private_key = generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend()) self.public_key = self.private_key.public_key() self.kid = kid - def wrap_key(self, key, algorithm='RSA'): - if algorithm == 'RSA': - return self.public_key.encrypt(key, - OAEP( - mgf=MGF1(algorithm=SHA1()), # nosec - algorithm=SHA1(), # nosec - label=None) - ) + def wrap_key(self, key, algorithm="RSA"): + if algorithm == "RSA": + return self.public_key.encrypt( + key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + ) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) def unwrap_key(self, key, algorithm): - if algorithm == 'RSA': - return self.private_key.decrypt(key, - OAEP( - mgf=MGF1(algorithm=SHA1()), # nosec - algorithm=SHA1(), # nosec - label=None) - ) + if algorithm == "RSA": + return self.private_key.decrypt( + key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + ) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) def get_key_wrap_algorithm(self): - return 'RSA' + return "RSA" def get_kid(self): return self.kid @@ -99,10 +91,10 @@ def mock_urandom(size: int) -> bytes: to be recorded. 
""" if size == 12: - return b'Mb\xd5N\xc2\xbd\xa0\xc8\xa4L\xfb\xa0' + return b"Mb\xd5N\xc2\xbd\xa0\xc8\xa4L\xfb\xa0" elif size == 16: - return b'\xbb\xd6\x87\xb6j\xe5\xdc\x93\xb0\x13\x1e\xcc\x9f\xf4\xca\xab' + return b"\xbb\xd6\x87\xb6j\xe5\xdc\x93\xb0\x13\x1e\xcc\x9f\xf4\xca\xab" elif size == 32: - return b'\x08\xe0A\xb6\xf2\xb7x\x8f\xe5\xdap\x87^6x~\xa4F\xc4\xe9\xb1\x8a:\xfbC%S\x0cZ\xbb\xbe\x88' + return b"\x08\xe0A\xb6\xf2\xb7x\x8f\xe5\xdap\x87^6x~\xa4F\xc4\xe9\xb1\x8a:\xfbC%S\x0cZ\xbb\xbe\x88" else: return os.urandom(size) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/_test_base_legacy.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/_test_base_legacy.py index 7c877367292e..2dc9b5078b1a 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/_test_base_legacy.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/_test_base_legacy.py @@ -42,8 +42,8 @@ def __init__(self, arguments): session.verify = False if not _LegacyServiceTest.service_client or self.args.no_client_share: _LegacyServiceTest.service_client = BlockBlobService( - connection_string=connection_string, - request_session=session) + connection_string=connection_string, request_session=session + ) _LegacyServiceTest.service_client.MAX_SINGLE_PUT_SIZE = self.args.max_put_size _LegacyServiceTest.service_client.MAX_BLOCK_SIZE = self.args.max_block_size _LegacyServiceTest.service_client.MIN_LARGE_BLOCK_UPLOAD_THRESHOLD = self.args.buffer_threshold @@ -51,20 +51,48 @@ def __init__(self, arguments): self.service_client = _LegacyServiceTest.service_client if self.args.test_proxies: - self.service_client.request_callback = functools.partial( - test_proxy_callback, - self._test_proxy_policy - ) + self.service_client.request_callback = functools.partial(test_proxy_callback, self._test_proxy_policy) @staticmethod def add_arguments(parser): super(_LegacyServiceTest, _LegacyServiceTest).add_arguments(parser) - parser.add_argument('--max-put-size', nargs='?', type=int, help='Maximum size of data uploading in single HTTP PUT. Defaults to 64*1024*1024', default=64*1024*1024) - parser.add_argument('--max-block-size', nargs='?', type=int, help='Maximum size of data in a block within a blob. Defaults to 4*1024*1024', default=4*1024*1024) - parser.add_argument('--buffer-threshold', nargs='?', type=int, help='Minimum block size to prevent full block buffering. Defaults to 4*1024*1024+1', default=4*1024*1024+1) - parser.add_argument('--max-concurrency', nargs='?', type=int, help='Maximum number of concurrent threads used for data transfer. Defaults to 1', default=1) - parser.add_argument('-s', '--size', nargs='?', type=int, help='Size of data to transfer. Default is 10240.', default=10240) - parser.add_argument('--no-client-share', action='store_true', help='Create one ServiceClient per test instance. Default is to share a single ServiceClient.', default=False) + parser.add_argument( + "--max-put-size", + nargs="?", + type=int, + help="Maximum size of data uploading in single HTTP PUT. Defaults to 64*1024*1024", + default=64 * 1024 * 1024, + ) + parser.add_argument( + "--max-block-size", + nargs="?", + type=int, + help="Maximum size of data in a block within a blob. Defaults to 4*1024*1024", + default=4 * 1024 * 1024, + ) + parser.add_argument( + "--buffer-threshold", + nargs="?", + type=int, + help="Minimum block size to prevent full block buffering. 
Defaults to 4*1024*1024+1", + default=4 * 1024 * 1024 + 1, + ) + parser.add_argument( + "--max-concurrency", + nargs="?", + type=int, + help="Maximum number of concurrent threads used for data transfer. Defaults to 1", + default=1, + ) + parser.add_argument( + "-s", "--size", nargs="?", type=int, help="Size of data to transfer. Default is 10240.", default=10240 + ) + parser.add_argument( + "--no-client-share", + action="store_true", + help="Create one ServiceClient per test instance. Default is to share a single ServiceClient.", + default=False, + ) class _LegacyContainerTest(_LegacyServiceTest): diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/download.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/download.py index 0d2adbfeae8a..353a5fcda989 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/download.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/download.py @@ -18,9 +18,8 @@ async def global_setup(self): await super().global_setup() data = get_random_bytes(self.args.size) self.service_client.create_blob_from_bytes( - container_name=self.container_name, - blob_name=self.blob_name, - blob=data) + container_name=self.container_name, blob_name=self.blob_name, blob=data + ) def run_sync(self): self.download_stream.reset() @@ -28,7 +27,8 @@ def run_sync(self): container_name=self.container_name, blob_name=self.blob_name, stream=self.download_stream, - max_connections=self.args.max_concurrency) + max_connections=self.args.max_concurrency, + ) async def run_async(self): raise NotImplementedError("Async not supported for legacy T1 tests.") diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/list_blobs.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/list_blobs.py index b3a55bcf23b9..57632f4bb6cf 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/list_blobs.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/list_blobs.py @@ -12,9 +12,8 @@ async def global_setup(self): await super().global_setup() for i in range(self.args.count): self.service_client.create_blob_from_bytes( - container_name=self.container_name, - blob_name="listtest" + str(i), - blob=b"") + container_name=self.container_name, blob_name="listtest" + str(i), blob=b"" + ) def run_sync(self): for _ in self.service_client.list_blobs(container_name=self.container_name): @@ -26,4 +25,6 @@ async def run_async(self): @staticmethod def add_arguments(parser): super(LegacyListBlobsTest, LegacyListBlobsTest).add_arguments(parser) - parser.add_argument('-c', '--count', nargs='?', type=int, help='Number of blobs to list. Defaults to 100', default=100) + parser.add_argument( + "-c", "--count", nargs="?", type=int, help="Number of blobs to list. 
Defaults to 100", default=100 + ) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload.py index 093df308829e..d6be8d1feb4f 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload.py @@ -15,14 +15,15 @@ def __init__(self, arguments): super().__init__(arguments) self.blob_name = "blobtest-" + str(uuid.uuid4()) self.upload_stream = RandomStream(self.args.size) - + def run_sync(self): self.upload_stream.reset() self.service_client.create_blob_from_stream( container_name=self.container_name, blob_name=self.blob_name, stream=self.upload_stream, - max_connections=self.args.max_concurrency) + max_connections=self.args.max_concurrency, + ) async def run_async(self): raise NotImplementedError("Async not supported for legacy T1 tests.") diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_block.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_block.py index 652092a425d1..e4f678f0e02a 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_block.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_block.py @@ -19,10 +19,8 @@ def __init__(self, arguments): def run_sync(self): self.service_client.put_block( - container_name=self.container_name, - blob_name=self.blob_name, - block=self.data, - block_id=self.block_id) + container_name=self.container_name, blob_name=self.blob_name, block=self.data, block_id=self.block_id + ) async def run_async(self): raise NotImplementedError("Async not supported for legacy T1 tests.") diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_from_file.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_from_file.py index 62d95b106b2f..4d18e997cbcc 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_from_file.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/T1_legacy_tests/upload_from_file.py @@ -35,7 +35,8 @@ def run_sync(self): container_name=self.container_name, blob_name=self.blob_name, file_path=LegacyUploadFromFileTest.temp_file, - max_connections=self.args.max_concurrency) + max_connections=self.args.max_concurrency, + ) async def run_async(self): raise NotImplementedError("Async not supported for legacy T1 tests.") diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/_test_base.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/_test_base.py index 366f66f95626..db5133c8543d 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/_test_base.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/_test_base.py @@ -21,15 +21,15 @@ class _ServiceTest(PerfStressTest): def __init__(self, arguments): super().__init__(arguments) if self.args.test_proxies: - self._client_kwargs['_additional_pipeline_policies'] = self._client_kwargs['per_retry_policies'] - self._client_kwargs['max_single_put_size'] = self.args.max_put_size - self._client_kwargs['max_block_size'] = self.args.max_block_size - self._client_kwargs['min_large_block_upload_threshold'] = self.args.buffer_threshold + self._client_kwargs["_additional_pipeline_policies"] = self._client_kwargs["per_retry_policies"] + self._client_kwargs["max_single_put_size"] = self.args.max_put_size + 
self._client_kwargs["max_block_size"] = self.args.max_block_size + self._client_kwargs["min_large_block_upload_threshold"] = self.args.buffer_threshold if self.args.client_encryption: self.key_encryption_key = KeyWrapper() - self._client_kwargs['require_encryption'] = True - self._client_kwargs['key_encryption_key'] = self.key_encryption_key - self._client_kwargs['encryption_version'] = self.args.client_encryption + self._client_kwargs["require_encryption"] = True + self._client_kwargs["key_encryption_key"] = self.key_encryption_key + self._client_kwargs["encryption_version"] = self.args.client_encryption # self._client_kwargs['api_version'] = '2019-02-02' # Used only for comparison with T1 legacy tests if not _ServiceTest.service_client or self.args.no_client_share: @@ -37,25 +37,25 @@ def __init__(self, arguments): tenant_id = self.get_from_env("AZURE-STORAGE-BLOB_TENANT_ID") client_id = self.get_from_env("AZURE-STORAGE-BLOB_CLIENT_ID") client_secret = self.get_from_env("AZURE-STORAGE-BLOB_CLIENT_SECRET") - sync_token_credential = SyncClientSecretCredential( - tenant_id, - client_id, - client_secret - ) - async_token_credential = AsyncClientSecretCredential( - tenant_id, - client_id, - client_secret - ) + sync_token_credential = SyncClientSecretCredential(tenant_id, client_id, client_secret) + async_token_credential = AsyncClientSecretCredential(tenant_id, client_id, client_secret) account_name = self.get_from_env("AZURE_STORAGE_ACCOUNT_NAME") # We assume these tests will only be run on the Azure public cloud for now. url = f"https://{account_name}.blob.core.windows.net" - _ServiceTest.service_client = SyncBlobServiceClient(account_url=url, credential=sync_token_credential, **self._client_kwargs) - _ServiceTest.async_service_client = AsyncBlobServiceClient(account_url=url, credential=async_token_credential, **self._client_kwargs) + _ServiceTest.service_client = SyncBlobServiceClient( + account_url=url, credential=sync_token_credential, **self._client_kwargs + ) + _ServiceTest.async_service_client = AsyncBlobServiceClient( + account_url=url, credential=async_token_credential, **self._client_kwargs + ) else: connection_string = self.get_from_env("AZURE_STORAGE_CONNECTION_STRING") - _ServiceTest.service_client = SyncBlobServiceClient.from_connection_string(conn_str=connection_string, **self._client_kwargs) - _ServiceTest.async_service_client = AsyncBlobServiceClient.from_connection_string(conn_str=connection_string, **self._client_kwargs) + _ServiceTest.service_client = SyncBlobServiceClient.from_connection_string( + conn_str=connection_string, **self._client_kwargs + ) + _ServiceTest.async_service_client = AsyncBlobServiceClient.from_connection_string( + conn_str=connection_string, **self._client_kwargs + ) self.service_client = _ServiceTest.service_client self.async_service_client = _ServiceTest.async_service_client @@ -66,15 +66,54 @@ async def close(self): @staticmethod def add_arguments(parser): super(_ServiceTest, _ServiceTest).add_arguments(parser) - parser.add_argument('--max-put-size', nargs='?', type=int, help='Maximum size of data uploading in single HTTP PUT. Defaults to 64*1024*1024', default=64*1024*1024) - parser.add_argument('--max-block-size', nargs='?', type=int, help='Maximum size of data in a block within a blob. Defaults to 4*1024*1024', default=4*1024*1024) - parser.add_argument('--buffer-threshold', nargs='?', type=int, help='Minimum block size to prevent full block buffering. 
Defaults to 4*1024*1024+1', default=4*1024*1024+1) - parser.add_argument('--client-encryption', nargs='?', type=str, help='The version of client-side encryption to use. Leave out for no encryption.', default=None) - parser.add_argument('--max-concurrency', nargs='?', type=int, help='Maximum number of concurrent threads used for data transfer. Defaults to 1', default=1) - parser.add_argument('-s', '--size', nargs='?', type=int, help='Size of data to transfer. Default is 10240.', default=10240) - parser.add_argument('--no-client-share', action='store_true', help='Create one ServiceClient per test instance. Default is to share a single ServiceClient.', default=False) parser.add_argument( - "--use-entra-id", action="store_true", help="Use Microsoft Entra ID authentication instead of connection string." + "--max-put-size", + nargs="?", + type=int, + help="Maximum size of data uploading in single HTTP PUT. Defaults to 64*1024*1024", + default=64 * 1024 * 1024, + ) + parser.add_argument( + "--max-block-size", + nargs="?", + type=int, + help="Maximum size of data in a block within a blob. Defaults to 4*1024*1024", + default=4 * 1024 * 1024, + ) + parser.add_argument( + "--buffer-threshold", + nargs="?", + type=int, + help="Minimum block size to prevent full block buffering. Defaults to 4*1024*1024+1", + default=4 * 1024 * 1024 + 1, + ) + parser.add_argument( + "--client-encryption", + nargs="?", + type=str, + help="The version of client-side encryption to use. Leave out for no encryption.", + default=None, + ) + parser.add_argument( + "--max-concurrency", + nargs="?", + type=int, + help="Maximum number of concurrent threads used for data transfer. Defaults to 1", + default=1, + ) + parser.add_argument( + "-s", "--size", nargs="?", type=int, help="Size of data to transfer. Default is 10240.", default=10240 + ) + parser.add_argument( + "--no-client-share", + action="store_true", + help="Create one ServiceClient per test instance. 
Default is to share a single ServiceClient.", + default=False, + ) + parser.add_argument( + "--use-entra-id", + action="store_true", + help="Use Microsoft Entra ID authentication instead of connection string.", ) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/key_wrapper.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/key_wrapper.py index c5e8797fb5db..a26553fa4398 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/key_wrapper.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/key_wrapper.py @@ -10,25 +10,25 @@ class KeyWrapper: - def __init__(self, kid='local:key1'): + def __init__(self, kid="local:key1"): self.kek = os.urandom(32) self.backend = default_backend() self.kid = kid - def wrap_key(self, key, algorithm='A256KW'): - if algorithm == 'A256KW': + def wrap_key(self, key, algorithm="A256KW"): + if algorithm == "A256KW": return aes_key_wrap(self.kek, key, self.backend) raise ValueError("Unknown key wrap algorithm.") def unwrap_key(self, key, algorithm): - if algorithm == 'A256KW': + if algorithm == "A256KW": return aes_key_unwrap(self.kek, key, self.backend) raise ValueError("Unknown key wrap algorithm.") def get_key_wrap_algorithm(self): - return 'A256KW' + return "A256KW" def get_kid(self): return self.kid diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/list_blobs.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/list_blobs.py index b4b074788a1d..c5e10c6abff5 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/list_blobs.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/list_blobs.py @@ -12,7 +12,10 @@ class ListBlobsTest(_ContainerTest): async def global_setup(self): await super().global_setup() - pending = (asyncio.ensure_future(self.async_container_client.upload_blob("listtest" + str(i), data=b"")) for i in range(self.args.count)) + pending = ( + asyncio.ensure_future(self.async_container_client.upload_blob("listtest" + str(i), data=b"")) + for i in range(self.args.count) + ) running = list(itertools.islice(pending, 16)) while True: # Wait for some upload to finish before adding a new one @@ -45,5 +48,12 @@ async def run_async(self): @staticmethod def add_arguments(parser): super(ListBlobsTest, ListBlobsTest).add_arguments(parser) - parser.add_argument('-c', '--count', nargs='?', type=int, help='Number of blobs to list. Defaults to 100', default=100) - parser.add_argument('--name-only', action='store_true', help='True to use list_blob_names, False to use list_blobs. Default is False.', default=False) + parser.add_argument( + "-c", "--count", nargs="?", type=int, help="Number of blobs to list. Defaults to 100", default=100 + ) + parser.add_argument( + "--name-only", + action="store_true", + help="True to use list_blob_names, False to use list_blobs. 
Default is False.", + default=False, + ) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload.py index 84694482694e..28a706570e68 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload.py @@ -18,15 +18,11 @@ def __init__(self, arguments): def run_sync(self): self.upload_stream.reset() self.blob_client.upload_blob( - self.upload_stream, - length=self.args.size, - overwrite=True, - max_concurrency=self.args.max_concurrency) + self.upload_stream, length=self.args.size, overwrite=True, max_concurrency=self.args.max_concurrency + ) async def run_async(self): self.upload_stream_async.reset() await self.async_blob_client.upload_blob( - self.upload_stream_async, - length=self.args.size, - overwrite=True, - max_concurrency=self.args.max_concurrency) + self.upload_stream_async, length=self.args.size, overwrite=True, max_concurrency=self.args.max_concurrency + ) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_block.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_block.py index c1a4442a374e..bf767f91b0f2 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_block.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_block.py @@ -17,11 +17,7 @@ def __init__(self, arguments): self.data = get_random_bytes(self.args.size) def run_sync(self): - self.blob_client.stage_block( - block_id=self.block_id, - data=self.data) + self.blob_client.stage_block(block_id=self.block_id, data=self.data) async def run_async(self): - await self.async_blob_client.stage_block( - block_id=self.block_id, - data=self.data) + await self.async_blob_client.stage_block(block_id=self.block_id, data=self.data) diff --git a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_from_file.py b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_from_file.py index 600d409c087d..e8aacf9b9bb7 100644 --- a/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_from_file.py +++ b/sdk/storage/azure-storage-blob/tests/perfstress_tests/upload_from_file.py @@ -26,12 +26,9 @@ async def global_cleanup(self): await super().global_cleanup() def run_sync(self): - with open(UploadFromFileTest.temp_file, 'rb') as fp: + with open(UploadFromFileTest.temp_file, "rb") as fp: self.blob_client.upload_blob(fp, max_concurrency=self.args.max_concurrency, overwrite=True) async def run_async(self): - with open(UploadFromFileTest.temp_file, 'rb') as fp: - await self.async_blob_client.upload_blob( - fp, - max_concurrency=self.args.max_concurrency, - overwrite=True) + with open(UploadFromFileTest.temp_file, "rb") as fp: + await self.async_blob_client.upload_blob(fp, max_concurrency=self.args.max_concurrency, overwrite=True) diff --git a/sdk/storage/azure-storage-blob/tests/settings/settings_fake.py b/sdk/storage/azure-storage-blob/tests/settings/settings_fake.py index e4e631c811fa..18b6045520b5 100644 --- a/sdk/storage/azure-storage-blob/tests/settings/settings_fake.py +++ b/sdk/storage/azure-storage-blob/tests/settings/settings_fake.py @@ -19,7 +19,7 @@ SOFT_DELETE_STORAGE_ACCOUNT_KEY = "fakekey" STORAGE_RESOURCE_GROUP_NAME = "fakename" -ACCOUNT_URL_SUFFIX = 'core.windows.net' +ACCOUNT_URL_SUFFIX = "core.windows.net" RUN_IN_LIVE = "False" SKIP_LIVE_RECORDING = "True" diff --git a/sdk/storage/azure-storage-blob/tests/settings/testcase.py 
b/sdk/storage/azure-storage-blob/tests/settings/testcase.py index 468dde0b667b..68d9a0abe8d1 100644 --- a/sdk/storage/azure-storage-blob/tests/settings/testcase.py +++ b/sdk/storage/azure-storage-blob/tests/settings/testcase.py @@ -13,7 +13,7 @@ from devtools_testutils.fake_credentials import STORAGE_ACCOUNT_FAKE_KEY try: - from cStringIO import StringIO # Python 2 + from cStringIO import StringIO # Python 2 except ImportError: from io import StringIO @@ -25,29 +25,48 @@ from .settings_fake import * -LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s' -os.environ['STORAGE_ACCOUNT_NAME'] = os.environ.get('STORAGE_ACCOUNT_NAME', None) or STORAGE_ACCOUNT_NAME -os.environ['STORAGE_ACCOUNT_KEY'] = os.environ.get('STORAGE_ACCOUNT_KEY', None) or STORAGE_ACCOUNT_KEY -os.environ['SECONDARY_STORAGE_ACCOUNT_NAME'] = os.environ.get('SECONDARY_STORAGE_ACCOUNT_NAME', None) or SECONDARY_STORAGE_ACCOUNT_NAME -os.environ['SECONDARY_STORAGE_ACCOUNT_KEY'] = os.environ.get('SECONDARY_STORAGE_ACCOUNT_KEY', None) or SECONDARY_STORAGE_ACCOUNT_KEY -os.environ['BLOB_STORAGE_ACCOUNT_NAME'] = os.environ.get('BLOB_STORAGE_ACCOUNT_NAME', None) or BLOB_STORAGE_ACCOUNT_NAME -os.environ['BLOB_STORAGE_ACCOUNT_KEY'] = os.environ.get('BLOB_STORAGE_ACCOUNT_KEY', None) or BLOB_STORAGE_ACCOUNT_KEY -os.environ['VERSIONED_STORAGE_ACCOUNT_NAME'] = os.environ.get('VERSIONED_STORAGE_ACCOUNT_NAME', None) or VERSIONED_STORAGE_ACCOUNT_NAME -os.environ['VERSIONED_STORAGE_ACCOUNT_KEY'] = os.environ.get('VERSIONED_STORAGE_ACCOUNT_KEY', None) or VERSIONED_STORAGE_ACCOUNT_KEY -os.environ['PREMIUM_STORAGE_ACCOUNT_NAME'] = os.environ.get('PREMIUM_STORAGE_ACCOUNT_NAME', None) or PREMIUM_STORAGE_ACCOUNT_NAME -os.environ['PREMIUM_STORAGE_ACCOUNT_KEY'] = os.environ.get('PREMIUM_STORAGE_ACCOUNT_KEY', None) or PREMIUM_STORAGE_ACCOUNT_KEY -os.environ['SOFT_DELETE_STORAGE_ACCOUNT_NAME'] = os.environ.get('SOFT_DELETE_STORAGE_ACCOUNT_NAME', None) or SOFT_DELETE_STORAGE_ACCOUNT_NAME -os.environ['SOFT_DELETE_STORAGE_ACCOUNT_KEY'] = os.environ.get('SOFT_DELETE_STORAGE_ACCOUNT_KEY', None) or SOFT_DELETE_STORAGE_ACCOUNT_KEY -os.environ['STORAGE_RESOURCE_GROUP_NAME'] = os.environ.get('STORAGE_RESOURCE_GROUP_NAME', None) or STORAGE_RESOURCE_GROUP_NAME +LOGGING_FORMAT = "%(asctime)s %(name)-20s %(levelname)-5s %(message)s" +os.environ["STORAGE_ACCOUNT_NAME"] = os.environ.get("STORAGE_ACCOUNT_NAME", None) or STORAGE_ACCOUNT_NAME +os.environ["STORAGE_ACCOUNT_KEY"] = os.environ.get("STORAGE_ACCOUNT_KEY", None) or STORAGE_ACCOUNT_KEY +os.environ["SECONDARY_STORAGE_ACCOUNT_NAME"] = ( + os.environ.get("SECONDARY_STORAGE_ACCOUNT_NAME", None) or SECONDARY_STORAGE_ACCOUNT_NAME +) +os.environ["SECONDARY_STORAGE_ACCOUNT_KEY"] = ( + os.environ.get("SECONDARY_STORAGE_ACCOUNT_KEY", None) or SECONDARY_STORAGE_ACCOUNT_KEY +) +os.environ["BLOB_STORAGE_ACCOUNT_NAME"] = os.environ.get("BLOB_STORAGE_ACCOUNT_NAME", None) or BLOB_STORAGE_ACCOUNT_NAME +os.environ["BLOB_STORAGE_ACCOUNT_KEY"] = os.environ.get("BLOB_STORAGE_ACCOUNT_KEY", None) or BLOB_STORAGE_ACCOUNT_KEY +os.environ["VERSIONED_STORAGE_ACCOUNT_NAME"] = ( + os.environ.get("VERSIONED_STORAGE_ACCOUNT_NAME", None) or VERSIONED_STORAGE_ACCOUNT_NAME +) +os.environ["VERSIONED_STORAGE_ACCOUNT_KEY"] = ( + os.environ.get("VERSIONED_STORAGE_ACCOUNT_KEY", None) or VERSIONED_STORAGE_ACCOUNT_KEY +) +os.environ["PREMIUM_STORAGE_ACCOUNT_NAME"] = ( + os.environ.get("PREMIUM_STORAGE_ACCOUNT_NAME", None) or PREMIUM_STORAGE_ACCOUNT_NAME +) +os.environ["PREMIUM_STORAGE_ACCOUNT_KEY"] = ( + 
os.environ.get("PREMIUM_STORAGE_ACCOUNT_KEY", None) or PREMIUM_STORAGE_ACCOUNT_KEY +) +os.environ["SOFT_DELETE_STORAGE_ACCOUNT_NAME"] = ( + os.environ.get("SOFT_DELETE_STORAGE_ACCOUNT_NAME", None) or SOFT_DELETE_STORAGE_ACCOUNT_NAME +) +os.environ["SOFT_DELETE_STORAGE_ACCOUNT_KEY"] = ( + os.environ.get("SOFT_DELETE_STORAGE_ACCOUNT_KEY", None) or SOFT_DELETE_STORAGE_ACCOUNT_KEY +) +os.environ["STORAGE_RESOURCE_GROUP_NAME"] = ( + os.environ.get("STORAGE_RESOURCE_GROUP_NAME", None) or STORAGE_RESOURCE_GROUP_NAME +) -os.environ['AZURE_TEST_RUN_LIVE'] = os.environ.get('AZURE_TEST_RUN_LIVE', None) or RUN_IN_LIVE -os.environ['AZURE_SKIP_LIVE_RECORDING'] = os.environ.get('AZURE_SKIP_LIVE_RECORDING', None) or SKIP_LIVE_RECORDING -os.environ['PROTOCOL'] = PROTOCOL -os.environ['ACCOUNT_URL_SUFFIX'] = ACCOUNT_URL_SUFFIX +os.environ["AZURE_TEST_RUN_LIVE"] = os.environ.get("AZURE_TEST_RUN_LIVE", None) or RUN_IN_LIVE +os.environ["AZURE_SKIP_LIVE_RECORDING"] = os.environ.get("AZURE_SKIP_LIVE_RECORDING", None) or SKIP_LIVE_RECORDING +os.environ["PROTOCOL"] = PROTOCOL +os.environ["ACCOUNT_URL_SUFFIX"] = ACCOUNT_URL_SUFFIX BlobPreparer = functools.partial( - PowerShellPreparer, "storage", + PowerShellPreparer, + "storage", storage_account_name="storagename", storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, secondary_storage_account_name="pyrmtstoragestorname", @@ -56,15 +75,16 @@ blob_storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, versioned_storage_account_name="storagenamestorname", versioned_storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, - premium_storage_account_name='pyacrstoragestorname', + premium_storage_account_name="pyacrstoragestorname", premium_storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, soft_delete_storage_account_name="storagesoftdelname", soft_delete_storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, - storage_resource_group_name="rgname" + storage_resource_group_name="rgname", ) def not_for_emulator(test): def skip_test_if_targeting_emulator(self): test(self) + return skip_test_if_targeting_emulator diff --git a/sdk/storage/azure-storage-blob/tests/test_append_blob.py b/sdk/storage/azure-storage-blob/tests/test_append_blob.py index 79228abe08aa..38d405f85a44 100644 --- a/sdk/storage/azure-storage-blob/tests/test_append_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_append_blob.py @@ -18,7 +18,10 @@ BlobServiceClient, BlobClient, BlobType, - BlobSasPermissions, BlobImmutabilityPolicyMode, ImmutabilityPolicy) + BlobSasPermissions, + BlobImmutabilityPolicyMode, + ImmutabilityPolicy, +) from azure.storage.blob._shared.policies import StorageContentValidation from devtools_testutils import recorded_by_proxy @@ -27,7 +30,7 @@ from test_helpers import NonSeekableStream, ProgressTracker # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" LARGE_BLOB_SIZE = 64 * 1024 # ------------------------------------------------------------------------------ @@ -36,8 +39,8 @@ class TestStorageAppendBlob(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utcontainer') - self.source_container_name = self.get_resource_name('utcontainersource') + self.container_name = self.get_resource_name("utcontainer") + self.source_container_name = self.get_resource_name("utcontainersource") if self.is_live: try: bsc.create_container(self.container_name) @@ -53,9 +56,7 @@ def _get_blob_reference(self, 
prefix=TEST_BLOB_PREFIX): def _create_blob(self, bsc, tags=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) blob.create_append_blob(tags=tags) return blob @@ -69,6 +70,7 @@ def assertBlobEqual(self, blob, expected_data): stream = blob.download_blob() actual_data = stream.readall() assert actual_data == expected_data + # -------------------------------------------------------------------------- @BlobPreparer() @@ -78,7 +80,9 @@ def test_create_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob_name = self._get_blob_reference() @@ -89,8 +93,8 @@ def test_create_blob(self, **kwargs): # Assert blob_properties = blob.get_blob_properties() assert blob_properties is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -102,7 +106,8 @@ def test_get_blob_properties_using_vid(self, **kwargs): bsc = BlobServiceClient( self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, - max_block_size=4 * 1024) + max_block_size=4 * 1024, + ) self._setup(bsc) blob_name = self._get_blob_reference() @@ -111,15 +116,15 @@ def test_get_blob_properties_using_vid(self, **kwargs): blob = bsc.get_blob_client(self.container_name, blob_name) create_resp = blob.create_append_blob() # create operation will return a version id - assert create_resp['version_id'] is not None + assert create_resp["version_id"] is not None # Assert - blob_properties = blob.get_blob_properties(version_id=create_resp['version_id']) + blob_properties = blob.get_blob_properties(version_id=create_resp["version_id"]) assert blob_properties is not None assert blob_properties.is_current_version assert blob_properties.version_id is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -127,19 +132,21 @@ def test_create_blob_with_lease_id(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") create_resp = blob.create_append_blob(lease=lease) # Assert blob_properties = blob.get_blob_properties() assert blob_properties is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert 
blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -147,9 +154,11 @@ def test_create_blob_with_metadata(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} blob_name = self._get_blob_reference() blob = bsc.get_blob_client(self.container_name, blob_name) @@ -166,20 +175,22 @@ def test_append_block(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act for i in range(5): - resp = blob.append_block(u'block {0}'.format(i).encode('utf-8')) - assert int(resp['blob_append_offset']) == 7 * i - assert resp['blob_committed_block_count'] == i + 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + resp = blob.append_block("block {0}".format(i).encode("utf-8")) + assert int(resp["blob_append_offset"]) == 7 * i + assert resp["blob_committed_block_count"] == i + 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None # Assert - self.assertBlobEqual(blob, b'block 0block 1block 2block 3block 4') + self.assertBlobEqual(blob, b"block 0block 1block 2block 3block 4") @BlobPreparer() @recorded_by_proxy @@ -187,7 +198,9 @@ def test_append_block_high_throughput(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=100 * 1024 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=100 * 1024 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(5 * 1024) @@ -205,18 +218,20 @@ def test_append_block_unicode(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - resp = blob.append_block(u'啊齄丂狛狜', encoding='utf-16') + resp = blob.append_block("啊齄丂狛狜", encoding="utf-16") # Assert - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + assert int(resp["blob_append_offset"]) == 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None @BlobPreparer() @recorded_by_proxy @@ -224,19 +239,22 @@ def test_append_block_with_if_tags(self, 
**kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} blob = self._create_blob(bsc, tags=tags) with pytest.raises(ResourceModifiedError): - blob.append_block(u'啊齄丂狛狜', encoding='utf-16', if_tags_match_condition="\"tag1\"='first tag'") - resp = blob.append_block(u'啊齄丂狛狜', encoding='utf-16', if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") + blob.append_block("啊齄丂狛狜", encoding="utf-16", if_tags_match_condition="\"tag1\"='first tag'") + resp = blob.append_block( + "啊齄丂狛狜", encoding="utf-16", if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'" + ) - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + assert int(resp["blob_append_offset"]) == 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None @BlobPreparer() @recorded_by_proxy @@ -244,16 +262,18 @@ def test_append_block_with_md5(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - resp = blob.append_block(b'block', validate_content=True) - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + resp = blob.append_block(b"block", validate_content=True) + assert int(resp["blob_append_offset"]) == 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None # Assert @@ -269,7 +289,9 @@ def test_append_block_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) destination_blob_client = self._create_blob(bsc) - token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token) + token = "Bearer {}".format( + self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token + ) # Assert this operation fails without a credential with pytest.raises(HttpResponseError): @@ -286,7 +308,9 @@ def test_append_block_from_url(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -305,40 +329,46 @@ def test_append_block_from_url(self, **kwargs): # Act: make append 
block from url calls split = 4 * 1024 - resp = destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=split) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_offset=0, source_length=split + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} destination_blob_client.set_blob_tags(tags=tags) with pytest.raises(ResourceModifiedError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=split, - source_length=LARGE_BLOB_SIZE - split, - if_tags_match_condition="\"tag1\"='first tag'") - resp = destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=split, - source_length=LARGE_BLOB_SIZE - split, - if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") - assert resp.get('blob_append_offset') == str(4 * 1024) - assert resp.get('blob_committed_block_count') == 2 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=split, + source_length=LARGE_BLOB_SIZE - split, + if_tags_match_condition="\"tag1\"='first tag'", + ) + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=split, + source_length=LARGE_BLOB_SIZE - split, + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", + ) + assert resp.get("blob_append_offset") == str(4 * 1024) + assert resp.get("blob_committed_block_count") == 2 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Missing start range shouldn't pass the validation with pytest.raises(ValueError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_length=LARGE_BLOB_SIZE) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, source_length=LARGE_BLOB_SIZE + ) @BlobPreparer() @recorded_by_proxy @@ -347,7 +377,9 @@ def test_append_block_from_url_and_validate_content_md5(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -366,24 +398,26 @@ def test_append_block_from_url_and_validate_content_md5(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls with correct md5 - resp = destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_content_md5=src_md5) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_content_md5=src_md5 + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_content_md5=StorageContentValidation.get_content_md5( - b"POTATO")) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_content_md5=StorageContentValidation.get_content_md5(b"POTATO"), + ) @BlobPreparer() @recorded_by_proxy @@ -392,7 +426,9 @@ def test_append_block_from_url_with_source_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -411,29 +447,32 @@ def test_append_block_from_url_with_source_if_modified(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client.append_block_from_url(source_blob_client.url + '?' 
+ sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_if_modified_since=source_blob_properties.get( - 'last_modified') - timedelta(hours=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_modified_since=source_blob_properties.get("last_modified") - timedelta(hours=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_if_modified_since=source_blob_properties.get( - 'last_modified')) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_modified_since=source_blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy @@ -442,7 +481,9 @@ def test_append_block_from_url_with_source_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -461,29 +502,32 @@ def test_append_block_from_url_with_source_if_unmodified(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_if_unmodified_since=source_blob_properties.get( - 'last_modified')) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_unmodified_since=source_blob_properties.get("last_modified"), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - destination_blob_client \ - .append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_unmodified_since=source_blob_properties.get('last_modified') - timedelta( - hours=15)) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=source_blob_properties.get("last_modified") - timedelta(hours=15), + ) @BlobPreparer() @recorded_by_proxy @@ -492,7 +536,9 @@ def test_append_block_from_url_with_source_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -511,29 +557,34 @@ def test_append_block_from_url_with_source_if_match(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag=source_blob_properties.get('etag'), - source_match_condition=MatchConditions.IfNotModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag=source_blob_properties.get("etag"), + source_match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfNotModified) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy @@ -542,7 +593,9 @@ def test_append_block_from_url_with_source_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -561,29 +614,34 @@ def test_append_block_from_url_with_source_if_none_match(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag=source_blob_properties.get('etag'), - source_match_condition=MatchConditions.IfModified) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag=source_blob_properties.get("etag"), + source_match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy @@ -592,7 +650,9 @@ def test_append_block_from_url_with_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -608,35 +668,38 @@ def test_append_block_from_url_with_if_match(self, **kwargs): ) destination_blob_name = self._get_blob_reference() - destination_blob_client = bsc.get_blob_client( - self.container_name, - destination_blob_name) + destination_blob_client = bsc.get_blob_client(self.container_name, destination_blob_name) destination_blob_properties_on_creation = destination_blob_client.create_append_blob() # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag=destination_blob_properties_on_creation.get('etag'), - match_condition=MatchConditions.IfNotModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag=destination_blob_properties_on_creation.get("etag"), + match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag='0x111111111111111', - match_condition=MatchConditions.IfNotModified) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy @@ -645,7 +708,9 @@ def test_append_block_from_url_with_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -663,28 +728,34 @@ def test_append_block_from_url_with_if_none_match(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag='0x111111111111111', match_condition=MatchConditions.IfModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag="0x111111111111111", + match_condition=MatchConditions.IfModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag=destination_blob_properties.get('etag'), - match_condition=MatchConditions.IfModified) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag=destination_blob_properties.get("etag"), + match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy @@ -693,7 +764,9 @@ def test_append_block_from_url_with_maxsize_condition(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -711,27 +784,32 @@ def test_append_block_from_url_with_maxsize_condition(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - maxsize_condition=LARGE_BLOB_SIZE + 1) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + maxsize_condition=LARGE_BLOB_SIZE + 1, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - maxsize_condition=LARGE_BLOB_SIZE + 1) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + maxsize_condition=LARGE_BLOB_SIZE + 1, + ) @BlobPreparer() @recorded_by_proxy @@ -740,7 +818,9 @@ def test_append_block_from_url_with_appendpos_condition(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -758,27 +838,29 @@ def test_append_block_from_url_with_appendpos_condition(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - appendpos_condition=0) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, source_offset=0, source_length=LARGE_BLOB_SIZE, appendpos_condition=0 + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - appendpos_condition=0) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + appendpos_condition=0, + ) @BlobPreparer() @recorded_by_proxy @@ -787,7 +869,9 @@ def test_append_block_from_url_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -806,28 +890,32 @@ def test_append_block_from_url_with_if_modified(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_modified_since=source_properties.get('last_modified') - timedelta(minutes=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_modified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_modified_since=destination_blob_properties.get( - 'last_modified')) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_modified_since=destination_blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy @@ -836,7 +924,9 @@ def test_append_block_from_url_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(source_blob_data, bsc) @@ -855,28 +945,32 @@ def test_append_block_from_url_with_if_unmodified(self, **kwargs): destination_blob_client = self._create_blob(bsc) # Act part 1: make append block from url calls - resp = destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_unmodified_since=source_properties.get('last_modified') + timedelta(minutes=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=source_properties.get("last_modified") + timedelta(minutes=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_unmodified_since=source_properties.get( - 'last_modified') - timedelta(minutes=15)) + destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) @BlobPreparer() @recorded_by_proxy @@ -884,37 +978,33 @@ def test_create_append_blob_with_no_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data1'}) + data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data1"} + ) update_resp = blob.upload_blob( - data2, - overwrite=False, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data2'}) + data2, overwrite=False, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data2"} + ) props = blob.get_blob_properties() # Assert appended_data = data1 + data2 self.assertBlobEqual(blob, appended_data) - assert props.etag == update_resp.get('etag') + assert props.etag == update_resp.get("etag") assert props.blob_type == BlobType.AppendBlob - assert props.last_modified == update_resp.get('last_modified') - assert props.metadata == {'blobdata': 'Data1'} + assert props.last_modified == update_resp.get("last_modified") + assert props.metadata == {"blobdata": "Data1"} assert props.size == LARGE_BLOB_SIZE + LARGE_BLOB_SIZE + 512 @BlobPreparer() @@ -923,34 +1013,30 @@ def test_create_append_blob_with_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data1'}) + data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data1"} + ) update_resp = blob.upload_blob( - data2, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data2'}) + data2, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data2"} + ) props = blob.get_blob_properties() # Assert self.assertBlobEqual(blob, data2) - assert props.etag == update_resp.get('etag') - assert props.last_modified == update_resp.get('last_modified') - assert props.metadata == {'blobdata': 'Data2'} + assert props.etag == update_resp.get("etag") + assert props.last_modified == update_resp.get("last_modified") + assert props.metadata == {"blobdata": "Data2"} assert props.blob_type == BlobType.AppendBlob assert props.size == LARGE_BLOB_SIZE + 512 @@ -960,19 +1046,21 @@ def test_append_blob_from_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" append_resp = blob.upload_blob(data, blob_type=BlobType.AppendBlob) blob_properties = blob.get_blob_properties() # Assert self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp['etag'] - assert blob_properties.last_modified == append_resp['last_modified'] + assert blob_properties.etag == append_resp["etag"] + assert blob_properties.last_modified == append_resp["last_modified"] @BlobPreparer() @recorded_by_proxy @@ -980,19 +1068,21 @@ def test_append_blob_from_0_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - data = b'' + data = b"" append_resp = blob.upload_blob(data, blob_type=BlobType.AppendBlob) # Assert self.assertBlobEqual(blob, data) # appending nothing should not make any network call - assert append_resp.get('etag') is None - assert append_resp.get('last_modified') is None + assert append_resp.get("etag") is None + assert append_resp.get("last_modified") is None @BlobPreparer() @recorded_by_proxy @@ -1000,10 +1090,12 @@ def test_append_blob_from_bytes_with_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" # Act progress = [] @@ -1025,12 +1117,14 @@ def test_append_blob_from_bytes_with_index(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" blob.upload_blob(data[3:], blob_type=BlobType.AppendBlob) # Assert @@ -1042,12 +1136,14 @@ def test_append_blob_from_bytes_with_index_and_count(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" blob.upload_blob(data[3:], length=5, blob_type=BlobType.AppendBlob) # Assert @@ -1059,7 +1155,9 @@ def test_append_blob_from_bytes_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1070,8 +1168,8 @@ def test_append_blob_from_bytes_chunked_upload(self, **kwargs): # Assert self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp['etag'] - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp["etag"] + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1079,7 +1177,9 @@ def test_append_blob_from_bytes_with_progress_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1110,7 +1210,9 @@ def test_append_blob_from_bytes_chunked_upload_with_index_and_count(self, **kwar storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = 
BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1121,7 +1223,7 @@ def test_append_blob_from_bytes_chunked_upload_with_index_and_count(self, **kwar blob.upload_blob(data[index:], length=blob_size, blob_type=BlobType.AppendBlob) # Assert - self.assertBlobEqual(blob, data[index:index + blob_size]) + self.assertBlobEqual(blob, data[index : index + blob_size]) @BlobPreparer() @recorded_by_proxy @@ -1129,7 +1231,9 @@ def test_append_blob_from_path_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1144,8 +1248,8 @@ def test_append_blob_from_path_chunked_upload(self, **kwargs): # Assert self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1153,7 +1257,9 @@ def test_append_blob_from_path_with_progress_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1189,7 +1295,9 @@ def test_append_blob_from_stream_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1203,8 +1311,8 @@ def test_append_blob_from_stream_chunked_upload(self, **kwargs): # Assert self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1212,7 +1320,9 @@ def test_app_blob_from_stream_nonseekable_chnked_upload_known_size(self, **kwarg storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) 
data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1234,7 +1344,9 @@ def test_app_blob_from_stream_nonseekable_chnked_upload_unk_size(self, **kwargs) storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1255,7 +1367,9 @@ def test_append_blob_from_stream_with_multiple_appends(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1280,7 +1394,9 @@ def test_append_blob_from_stream_chunked_upload_with_count(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1302,7 +1418,9 @@ def test_append_blob_from_stream_chunked_upload_with_count_parallel(self, **kwar storage_account_key = kwargs.pop("storage_account_key") # parallel tests introduce random order of requests, can only run live - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1317,8 +1435,8 @@ def test_append_blob_from_stream_chunked_upload_with_count_parallel(self, **kwar # Assert self.assertBlobEqual(blob, data[:blob_size]) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1326,11 +1444,13 @@ def test_append_blob_from_text(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-8') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-8") # Act append_resp = blob.upload_blob(text, blob_type=BlobType.AppendBlob) @@ -1338,8 +1458,8 @@ def test_append_blob_from_text(self, **kwargs): # Assert self.assertBlobEqual(blob, data) - assert blob_properties.etag == 
append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1347,14 +1467,16 @@ def test_append_blob_from_text_with_encoding(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act - blob.upload_blob(text, encoding='utf-16', blob_type=BlobType.AppendBlob) + blob.upload_blob(text, encoding="utf-16", blob_type=BlobType.AppendBlob) # Assert self.assertBlobEqual(blob, data) @@ -1365,11 +1487,13 @@ def test_append_blob_from_text_with_encoding_and_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act progress = [] @@ -1379,7 +1503,7 @@ def progress_gen(upload): yield upload upload_data = progress_gen(text) - blob.upload_blob(upload_data, encoding='utf-16', blob_type=BlobType.AppendBlob) + blob.upload_blob(upload_data, encoding="utf-16", blob_type=BlobType.AppendBlob) # Assert self.assert_upload_progress(len(data), self.config.max_block_size, progress) @@ -1390,11 +1514,13 @@ def test_append_blob_from_text_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) data = self.get_random_text_data(LARGE_BLOB_SIZE) - encoded_data = data.encode('utf-8') + encoded_data = data.encode("utf-8") # Act blob.upload_blob(data, blob_type=BlobType.AppendBlob) @@ -1408,10 +1534,12 @@ def test_append_blob_with_md5(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - data = b'hello world' + data = b"hello world" # Act blob.append_block(data, validate_content=True) @@ -1424,19 +1552,21 @@ def test_seal_append_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) resp = blob.seal_append_blob() - assert resp['blob_sealed'] + assert resp["blob_sealed"] with pytest.raises(HttpResponseError): blob.append_block("abc") - blob.set_blob_metadata({'isseal': 'yes'}) + blob.set_blob_metadata({"isseal": "yes"}) prop = blob.get_blob_properties() - assert prop.metadata['isseal'] == 'yes' + assert prop.metadata["isseal"] == "yes" @BlobPreparer() @recorded_by_proxy @@ -1444,14 +1574,16 @@ def test_seal_append_blob_with_append_condition(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) with pytest.raises(HttpResponseError): blob.seal_append_blob(appendpos_condition=1) resp = blob.seal_append_blob(appendpos_condition=0) - assert resp['blob_sealed'] + assert resp["blob_sealed"] @BlobPreparer() @recorded_by_proxy @@ -1459,7 +1591,9 @@ def test_copy_sealed_blob_will_get_a_sealed_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -1479,7 +1613,9 @@ def test_copy_unsealed_blob_will_get_a_sealed_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -1503,7 +1639,9 @@ def test_copy_sealed_blob_with_seal_blob_will_get_a_sealed_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -1524,41 +1662,51 @@ def test_create_append_blob_with_immutability_policy(self, **kwargs): storage_resource_group_name = kwargs.pop("storage_resource_group_name") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + versioned_storage_account_key, + max_block_size=4 * 1024, + ) self._setup(bsc) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: 
token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) - + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) + # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = bsc.get_blob_client(container_name, blob_name) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=10)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked) - blob.create_append_blob(immutability_policy=immutability_policy, - legal_hold=True) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=10)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + blob.create_append_blob(immutability_policy=immutability_policy, legal_hold=True) props = blob.get_blob_properties() with pytest.raises(HttpResponseError): blob.delete_blob() - assert props['has_legal_hold'] - assert props['immutability_policy']['expiry_time'] is not None - assert props['immutability_policy']['policy_mode'] is not None + assert props["has_legal_hold"] + assert props["immutability_policy"]["expiry_time"] is not None + assert props["immutability_policy"]["policy_mode"] is not None if self.is_live: blob.delete_immutability_policy() blob.set_legal_hold(False) blob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -1572,25 +1720,30 @@ def test_upload_progress_chunked(self, **kwargs): self._setup(bsc) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) blob_client.upload_blob( data, blob_type=BlobType.AppendBlob, overwrite=True, max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_append_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_append_blob_async.py index 441698d07f80..e32d38bbf7a3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_append_blob_async.py +++ 
b/sdk/storage/azure-storage-blob/tests/test_append_blob_async.py @@ -21,10 +21,10 @@ from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase from settings.testcase import BlobPreparer -from test_helpers_async import NonSeekableStream, ProgressTracker +from test_helpers_async import NonSeekableStream, ProgressTracker # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" LARGE_BLOB_SIZE = 64 * 1024 # ------------------------------------------------------------------------------ @@ -33,8 +33,8 @@ class TestStorageAppendBlobAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- async def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utcontainer') - self.source_container_name = self.get_resource_name('utcontainersource') + self.container_name = self.get_resource_name("utcontainer") + self.source_container_name = self.get_resource_name("utcontainersource") if self.is_live: try: await bsc.create_container(self.container_name) @@ -47,9 +47,7 @@ def _get_blob_reference(self): async def _create_blob(self, bsc, tags=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) await blob.create_append_blob(tags=tags) return blob @@ -63,6 +61,7 @@ async def assertBlobEqual(self, blob, expected_data): stream = await blob.download_blob() actual_data = await stream.readall() assert actual_data == expected_data + # -------------------------------------------------------------------------- @BlobPreparer() @@ -77,7 +76,9 @@ async def test_append_block_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) destination_blob_client = await self._create_blob(bsc) - access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token("https://storage.azure.com/.default") + access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token( + "https://storage.azure.com/.default" + ) token = "Bearer {}".format(access_token.token) # Assert this operation fails without a credential @@ -95,7 +96,9 @@ async def test_create_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob_name = self._get_blob_reference() @@ -106,8 +109,8 @@ async def test_create_blob(self, **kwargs): # Assert blob_properties = await blob.get_blob_properties() assert blob_properties is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -115,7 +118,11 @@ async def test_get_blob_properties_using_vid(self, **kwargs): versioned_storage_account_name = kwargs.pop("versioned_storage_account_name") versioned_storage_account_key = 
kwargs.pop("versioned_storage_account_key") - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + versioned_storage_account_key, + max_block_size=4 * 1024, + ) await self._setup(bsc) blob_name = self._get_blob_reference() @@ -123,13 +130,13 @@ async def test_get_blob_properties_using_vid(self, **kwargs): blob = bsc.get_blob_client(self.container_name, blob_name) create_resp = await blob.create_append_blob() # create operation will return a version id - assert create_resp['version_id'] is not None + assert create_resp["version_id"] is not None # Assert - blob_properties = await blob.get_blob_properties(version_id=create_resp['version_id']) + blob_properties = await blob.get_blob_properties(version_id=create_resp["version_id"]) assert blob_properties is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -137,19 +144,21 @@ async def test_create_blob_with_lease_id(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") create_resp = await blob.create_append_blob(lease=lease) # Assert blob_properties = await blob.get_blob_properties() assert blob_properties is not None - assert blob_properties.etag == create_resp.get('etag') - assert blob_properties.last_modified == create_resp.get('last_modified') + assert blob_properties.etag == create_resp.get("etag") + assert blob_properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -157,9 +166,11 @@ async def test_create_blob_with_metadata(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} blob_name = self._get_blob_reference() blob = bsc.get_blob_client(self.container_name, blob_name) @@ -176,20 +187,22 @@ async def test_append_block(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act for i in range(5): - resp 
= await blob.append_block(u'block {0}'.format(i).encode('utf-8')) - assert int(resp['blob_append_offset']) == 7 * i - assert resp['blob_committed_block_count'] == i + 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + resp = await blob.append_block("block {0}".format(i).encode("utf-8")) + assert int(resp["blob_append_offset"]) == 7 * i + assert resp["blob_committed_block_count"] == i + 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None # Assert - await self.assertBlobEqual(blob, b'block 0block 1block 2block 3block 4') + await self.assertBlobEqual(blob, b"block 0block 1block 2block 3block 4") @BlobPreparer() @recorded_by_proxy_async @@ -197,7 +210,9 @@ async def test_append_block_high_throughput(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=100 * 1024 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=100 * 1024 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(5 * 1024) @@ -215,16 +230,18 @@ async def test_append_block_unicode(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - resp = await blob.append_block(u'啊齄丂狛狜', encoding='utf-16') - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + resp = await blob.append_block("啊齄丂狛狜", encoding="utf-16") + assert int(resp["blob_append_offset"]) == 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None # Assert @@ -234,18 +251,22 @@ async def test_append_block_with_if_tags(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} blob = await self._create_blob(bsc, tags=tags) with pytest.raises(ResourceModifiedError): - await blob.append_block(u'啊齄丂狛狜', encoding='utf-16', if_tags_match_condition="\"tag1\"='first tag'") - resp = await blob.append_block(u'啊齄丂狛狜', encoding='utf-16', if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") + await blob.append_block("啊齄丂狛狜", encoding="utf-16", if_tags_match_condition="\"tag1\"='first tag'") + resp = await blob.append_block( + "啊齄丂狛狜", encoding="utf-16", if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'" + ) - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + assert int(resp["blob_append_offset"]) 
== 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None @BlobPreparer() @recorded_by_proxy_async @@ -253,16 +274,18 @@ async def test_append_block_with_md5(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - resp = await blob.append_block(b'block', validate_content=True) - assert int(resp['blob_append_offset']) == 0 - assert resp['blob_committed_block_count'] == 1 - assert resp['etag'] is not None - assert resp['last_modified'] is not None + resp = await blob.append_block(b"block", validate_content=True) + assert int(resp["blob_append_offset"]) == 0 + assert resp["blob_committed_block_count"] == 1 + assert resp["etag"] is not None + assert resp["last_modified"] is not None # Assert @@ -273,7 +296,9 @@ async def test_append_block_from_url(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -292,41 +317,47 @@ async def test_append_block_from_url(self, **kwargs): # Act: make append block from url calls split = 4 * 1024 - resp = await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=split) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_offset=0, source_length=split + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} await destination_blob_client.set_blob_tags(tags=tags) with pytest.raises(ResourceModifiedError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=split, - source_length=LARGE_BLOB_SIZE - split, - if_tags_match_condition="\"tag1\"='first tag'") - resp = await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=split, - source_length=LARGE_BLOB_SIZE - split, - if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") - - assert resp.get('blob_append_offset') == str(4 * 1024) - assert resp.get('blob_committed_block_count') == 2 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=split, + source_length=LARGE_BLOB_SIZE - split, + if_tags_match_condition="\"tag1\"='first tag'", + ) + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=split, + source_length=LARGE_BLOB_SIZE - split, + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", + ) + + assert resp.get("blob_append_offset") == str(4 * 1024) + assert resp.get("blob_committed_block_count") == 2 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert blob.get('etag') == resp.get('etag') - assert blob.get('last_modified') == resp.get('last_modified') - assert blob.get('size') == LARGE_BLOB_SIZE + assert blob.get("etag") == resp.get("etag") + assert blob.get("last_modified") == resp.get("last_modified") + assert blob.get("size") == LARGE_BLOB_SIZE # Missing start range shouldn't pass the validation with pytest.raises(ValueError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_length=LARGE_BLOB_SIZE) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_length=LARGE_BLOB_SIZE + ) @BlobPreparer() @recorded_by_proxy_async @@ -335,7 +366,9 @@ async def test_append_block_from_url_and_validate_content_md5(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -354,24 +387,26 @@ async def test_append_block_from_url_and_validate_content_md5(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls with correct md5 - resp = await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_content_md5=src_md5) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_content_md5=src_md5 + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' 
+ sas, - source_content_md5=StorageContentValidation.get_content_md5( - b"POTATO")) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_content_md5=StorageContentValidation.get_content_md5(b"POTATO"), + ) @BlobPreparer() @recorded_by_proxy_async @@ -380,7 +415,9 @@ async def test_append_block_from_url_with_source_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -399,29 +436,31 @@ async def test_append_block_from_url_with_source_if_modified(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_if_modified_since=source_blob_properties.get( - 'last_modified') - timedelta(hours=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_modified_since=source_blob_properties.get("last_modified") - timedelta(hours=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_if_modified_since=source_blob_properties.get( - 'last_modified')) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_modified_since=source_blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy_async @@ -430,7 +469,9 @@ async def test_append_block_from_url_with_source_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -449,30 +490,32 @@ async def test_append_block_from_url_with_source_if_unmodified(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_if_unmodified_since=source_blob_properties.get( - 'last_modified')) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_if_unmodified_since=source_blob_properties.get("last_modified"), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - await destination_blob_client \ - .append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_unmodified_since=source_blob_properties.get('last_modified') - timedelta( - hours=15)) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=source_blob_properties.get("last_modified") - timedelta(hours=15), + ) @BlobPreparer() @recorded_by_proxy_async @@ -481,7 +524,9 @@ async def test_append_block_from_url_with_source_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -500,29 +545,33 @@ async def test_append_block_from_url_with_source_if_match(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag=source_properties.get('etag'), - source_match_condition=MatchConditions.IfNotModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfNotModified) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -531,7 +580,9 @@ async def test_append_block_from_url_with_source_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -550,29 +601,33 @@ async def test_append_block_from_url_with_source_if_none_match(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(ResourceNotFoundError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - source_etag=source_properties.get('etag'), - source_match_condition=MatchConditions.IfModified) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -581,7 +636,9 @@ async def test_append_block_from_url_with_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -597,35 +654,37 @@ async def test_append_block_from_url_with_if_match(self, **kwargs): ) destination_blob_name = self._get_blob_reference() - destination_blob_client = bsc.get_blob_client( - self.container_name, - destination_blob_name) + destination_blob_client = bsc.get_blob_client(self.container_name, destination_blob_name) destination_blob_properties_on_creation = await destination_blob_client.create_append_blob() # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag=destination_blob_properties_on_creation.get('etag'), - match_condition=MatchConditions.IfNotModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag=destination_blob_properties_on_creation.get("etag"), + match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - etag='0x111111111111111', - match_condition=MatchConditions.IfNotModified) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -634,7 +693,9 @@ async def test_append_block_from_url_with_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -652,29 +713,34 @@ async def test_append_block_from_url_with_if_none_match(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - etag='0x111111111111111', match_condition=MatchConditions.IfModified) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag="0x111111111111111", + match_condition=MatchConditions.IfModified, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - etag=destination_blob_properties.get('etag'), - match_condition=MatchConditions.IfModified) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + etag=destination_blob_properties.get("etag"), + match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -683,7 +749,9 @@ async def test_append_block_from_url_with_maxsize_condition(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -701,28 +769,32 @@ async def test_append_block_from_url_with_maxsize_condition(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - maxsize_condition=LARGE_BLOB_SIZE + 1) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + maxsize_condition=LARGE_BLOB_SIZE + 1, + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - maxsize_condition=LARGE_BLOB_SIZE + 1) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + maxsize_condition=LARGE_BLOB_SIZE + 1, + ) @BlobPreparer() @recorded_by_proxy_async @@ -731,7 +803,9 @@ async def test_append_block_from_url_with_appendpos_condition(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -749,28 +823,29 @@ async def test_append_block_from_url_with_appendpos_condition(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - appendpos_condition=0) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, source_offset=0, source_length=LARGE_BLOB_SIZE, appendpos_condition=0 + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - appendpos_condition=0) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + appendpos_condition=0, + ) @BlobPreparer() @recorded_by_proxy_async @@ -779,7 +854,9 @@ async def test_append_block_from_url_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -798,29 +875,32 @@ async def test_append_block_from_url_with_if_modified(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_modified_since=source_properties.get('last_modified') - timedelta(minutes=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_modified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - if_modified_since=destination_blob_properties.get( - 'last_modified')) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_modified_since=destination_blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy_async @@ -829,7 +909,9 @@ async def test_append_block_from_url_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(source_blob_data, bsc) @@ -848,29 +930,32 @@ async def test_append_block_from_url_with_if_unmodified(self, **kwargs): destination_blob_client = await self._create_blob(bsc) # Act part 1: make append block from url calls - resp = await destination_blob_client. \ - append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, source_length=LARGE_BLOB_SIZE, - if_unmodified_since=source_properties.get('last_modified') + timedelta(minutes=15)) - assert resp.get('blob_append_offset') == '0' - assert resp.get('blob_committed_block_count') == 1 - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" + sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=source_properties.get("last_modified") + timedelta(minutes=15), + ) + assert resp.get("blob_append_offset") == "0" + assert resp.get("blob_committed_block_count") == 1 + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly destination_blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(destination_blob_client, source_blob_data) - assert destination_blob_properties.get('etag') == resp.get('etag') - assert destination_blob_properties.get('last_modified') == resp.get('last_modified') - assert destination_blob_properties.get('size') == LARGE_BLOB_SIZE + assert destination_blob_properties.get("etag") == resp.get("etag") + assert destination_blob_properties.get("last_modified") == resp.get("last_modified") + assert destination_blob_properties.get("size") == LARGE_BLOB_SIZE # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - await destination_blob_client.append_block_from_url(source_blob_client.url + '?' + sas, - source_offset=0, - source_length=LARGE_BLOB_SIZE, - if_unmodified_since=destination_blob_properties.get( - 'last_modified') - timedelta(minutes=15)) + await destination_blob_client.append_block_from_url( + source_blob_client.url + "?" 
+ sas, + source_offset=0, + source_length=LARGE_BLOB_SIZE, + if_unmodified_since=destination_blob_properties.get("last_modified") - timedelta(minutes=15), + ) @BlobPreparer() @recorded_by_proxy_async @@ -878,37 +963,33 @@ async def test_create_append_blob_with_no_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = await blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data1'}) + data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data1"} + ) update_resp = await blob.upload_blob( - data2, - overwrite=False, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data2'}) + data2, overwrite=False, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data2"} + ) props = await blob.get_blob_properties() # Assert appended_data = data1 + data2 await self.assertBlobEqual(blob, appended_data) - assert props.etag == update_resp.get('etag') + assert props.etag == update_resp.get("etag") assert props.blob_type == BlobType.AppendBlob - assert props.last_modified == update_resp.get('last_modified') - assert props.metadata == {'blobdata': 'Data1'} + assert props.last_modified == update_resp.get("last_modified") + assert props.metadata == {"blobdata": "Data1"} assert props.size == LARGE_BLOB_SIZE + LARGE_BLOB_SIZE + 512 @BlobPreparer() @@ -917,34 +998,30 @@ async def test_create_append_blob_with_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = await blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data1'}) + data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data1"} + ) update_resp = await blob.upload_blob( - data2, - overwrite=True, - blob_type=BlobType.AppendBlob, - metadata={'blobdata': 'Data2'}) + data2, overwrite=True, blob_type=BlobType.AppendBlob, metadata={"blobdata": "Data2"} + ) props = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data2) - assert props.etag == update_resp.get('etag') - assert props.last_modified == update_resp.get('last_modified') - assert props.metadata == {'blobdata': 'Data2'} + assert props.etag == update_resp.get("etag") + assert props.last_modified == update_resp.get("last_modified") + assert props.metadata == 
{"blobdata": "Data2"} assert props.blob_type == BlobType.AppendBlob assert props.size == LARGE_BLOB_SIZE + 512 @@ -954,19 +1031,21 @@ async def test_append_blob_from_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" append_resp = await blob.upload_blob(data, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp['etag'] - assert blob_properties.last_modified == append_resp['last_modified'] + assert blob_properties.etag == append_resp["etag"] + assert blob_properties.last_modified == append_resp["last_modified"] @BlobPreparer() @recorded_by_proxy_async @@ -974,19 +1053,21 @@ async def test_append_blob_from_0_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - data = b'' + data = b"" append_resp = await blob.upload_blob(data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) # appending nothing should not make any network call - assert append_resp.get('etag') is None - assert append_resp.get('last_modified') is None + assert append_resp.get("etag") is None + assert append_resp.get("last_modified") is None @BlobPreparer() @recorded_by_proxy_async @@ -994,10 +1075,12 @@ async def test_append_blob_from_bytes_with_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" # Act progress = [] @@ -1019,12 +1102,14 @@ async def test_append_blob_from_bytes_with_index(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" await blob.upload_blob(data[3:], blob_type=BlobType.AppendBlob) # Assert @@ -1036,12 +1121,14 @@ async def test_append_blob_from_bytes_with_index_and_count(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = 
kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - data = b'abcdefghijklmnopqrstuvwxyz' + data = b"abcdefghijklmnopqrstuvwxyz" await blob.upload_blob(data[3:], length=5, blob_type=BlobType.AppendBlob) # Assert @@ -1053,7 +1140,9 @@ async def test_append_blob_from_bytes_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1064,8 +1153,8 @@ async def test_append_blob_from_bytes_chunked_upload(self, **kwargs): # Assert await self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp['etag'] - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp["etag"] + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1073,7 +1162,9 @@ async def test_app_blob_from_bytes_progress_chnked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1104,7 +1195,9 @@ async def test_appblob_frm_bytes_chnked_upload_w_idx_n_count(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1115,7 +1208,7 @@ async def test_appblob_frm_bytes_chnked_upload_w_idx_n_count(self, **kwargs): await blob.upload_blob(data[index:], length=blob_size, blob_type=BlobType.AppendBlob) # Assert - await self.assertBlobEqual(blob, data[index:index + blob_size]) + await self.assertBlobEqual(blob, data[index : index + blob_size]) @BlobPreparer() @recorded_by_proxy_async @@ -1123,7 +1216,9 @@ async def test_append_blob_from_path_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) 
@@ -1137,8 +1232,8 @@ async def test_append_blob_from_path_chunked_upload(self, **kwargs): # Assert await self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1146,7 +1241,9 @@ async def test_append_blob_from_path_with_progress_chunked_upload(self, **kwargs storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1182,7 +1279,9 @@ async def test_append_blob_from_stream_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1196,8 +1295,8 @@ async def test_append_blob_from_stream_chunked_upload(self, **kwargs): # Assert await self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1205,7 +1304,9 @@ async def test_append_blob_from_stream_non_seekable_chunked_upload_known_size(se storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1227,7 +1328,9 @@ async def test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size( storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1248,7 +1351,9 @@ async def test_append_blob_from_stream_with_multiple_appends(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, 
"blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1273,7 +1378,9 @@ async def test_append_blob_from_stream_chunked_upload_with_count(self, **kwargs) storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1295,7 +1402,9 @@ async def test_append_blob_from_stream_chunked_upload_with_count_parallel(self, storage_account_key = kwargs.pop("storage_account_key") # parallel tests introduce random order of requests, can only run live - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1310,8 +1419,8 @@ async def test_append_blob_from_stream_chunked_upload_with_count_parallel(self, # Assert await self.assertBlobEqual(blob, data[:blob_size]) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1319,11 +1428,13 @@ async def test_append_blob_from_text(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-8') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-8") # Act append_resp = await blob.upload_blob(text, blob_type=BlobType.AppendBlob) @@ -1331,8 +1442,8 @@ async def test_append_blob_from_text(self, **kwargs): # Assert await self.assertBlobEqual(blob, data) - assert blob_properties.etag == append_resp.get('etag') - assert blob_properties.last_modified == append_resp.get('last_modified') + assert blob_properties.etag == append_resp.get("etag") + assert blob_properties.last_modified == append_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1340,14 +1451,16 @@ async def test_append_blob_from_text_with_encoding(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act - await 
blob.upload_blob(text, encoding='utf-16', blob_type=BlobType.AppendBlob) + await blob.upload_blob(text, encoding="utf-16", blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) @@ -1358,11 +1471,13 @@ async def test_append_blob_from_text_with_encoding_and_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act progress = [] @@ -1372,7 +1487,7 @@ def progress_gen(upload): yield upload upload_data = progress_gen(text) - await blob.upload_blob(upload_data, encoding='utf-16', blob_type=BlobType.AppendBlob) + await blob.upload_blob(upload_data, encoding="utf-16", blob_type=BlobType.AppendBlob) # Assert self.assert_upload_progress(len(data), self.config.max_block_size, progress) @@ -1383,11 +1498,13 @@ async def test_append_blob_from_text_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) data = self.get_random_text_data(LARGE_BLOB_SIZE) - encoded_data = data.encode('utf-8') + encoded_data = data.encode("utf-8") # Act await blob.upload_blob(data, blob_type=BlobType.AppendBlob) @@ -1401,10 +1518,12 @@ async def test_append_blob_with_md5(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) - data = b'hello world' + data = b"hello world" # Act await blob.append_block(data, validate_content=True) @@ -1415,19 +1534,21 @@ async def test_seal_append_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) resp = await blob.seal_append_blob() - assert resp['blob_sealed'] + assert resp["blob_sealed"] with pytest.raises(HttpResponseError): await blob.append_block("abc") - await blob.set_blob_metadata({'isseal': 'yes'}) + await blob.set_blob_metadata({"isseal": "yes"}) prop = await blob.get_blob_properties() - assert prop.metadata['isseal'] == 'yes' + assert prop.metadata["isseal"] == "yes" @BlobPreparer() @recorded_by_proxy_async @@ -1435,14 +1556,16 @@ async def test_seal_append_blob_with_append_condition(self, **kwargs): 
storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) with pytest.raises(HttpResponseError): await blob.seal_append_blob(appendpos_condition=1) resp = await blob.seal_append_blob(appendpos_condition=0) - assert resp['blob_sealed'] + assert resp["blob_sealed"] @BlobPreparer() @recorded_by_proxy_async @@ -1450,7 +1573,9 @@ async def test_copy_sealed_blob_will_get_a_sealed_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) @@ -1470,7 +1595,9 @@ async def test_copy_unsealed_blob_will_get_a_sealed_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) @@ -1494,7 +1621,9 @@ async def test_copy_sealed_blob_with_seal_blob_will_get_a_sealed_blob(self, **kw storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, max_block_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) @@ -1516,41 +1645,51 @@ async def test_create_append_blob_with_immutability_policy(self, **kwargs): storage_resource_group_name = kwargs.pop("storage_resource_group_name") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, max_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + versioned_storage_account_key, + max_block_size=4 * 1024, + ) await self._setup(bsc) - container_name = self.get_resource_name('vlwcontainerasync') + container_name = self.get_resource_name("vlwcontainerasync") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + 
immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = bsc.get_blob_client(container_name, blob_name) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=10)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked) - await blob.create_append_blob(immutability_policy=immutability_policy, - legal_hold=True) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=10)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + await blob.create_append_blob(immutability_policy=immutability_policy, legal_hold=True) props = await blob.get_blob_properties() with pytest.raises(HttpResponseError): await blob.delete_blob() - assert props['has_legal_hold'] - assert props['immutability_policy']['expiry_time'] is not None - assert props['immutability_policy']['policy_mode'] is not None + assert props["has_legal_hold"] + assert props["immutability_policy"]["expiry_time"] is not None + assert props["immutability_policy"]["policy_mode"] is not None if self.is_live: await blob.delete_immutability_policy() await blob.set_legal_hold(False) await blob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -1564,25 +1703,30 @@ async def test_upload_progress_chunked(self, **kwargs): await self._setup(bsc) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) await blob_client.upload_blob( data, blob_type=BlobType.AppendBlob, overwrite=True, max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions.py b/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions.py index c4aba5e31a80..9afeed4ca8d2 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions.py @@ -37,23 +37,21 @@ class TestStorageBlobAccessConditions(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") def _create_container(self, container_name, bsc): container = bsc.get_container_client(container_name) container.create_container() return container - def 
_create_container_and_block_blob(self, container_name, blob_name, - blob_data, bsc): + def _create_container_and_block_blob(self, container_name, blob_name, blob_data, bsc): container = self._create_container(container_name, bsc) blob = bsc.get_blob_client(container_name, blob_name) resp = blob.upload_blob(blob_data, length=len(blob_data)) - assert resp.get('etag') is not None + assert resp.get("etag") is not None return container, blob - def _create_container_and_page_blob(self, container_name, blob_name, - content_length, bsc): + def _create_container_and_page_blob(self, container_name, blob_name, content_length, bsc): container = self._create_container(container_name, bsc) blob = bsc.get_blob_client(container_name, blob_name) resp = blob.create_page_blob(str(content_length)) @@ -72,12 +70,13 @@ def test_get_blob_service_client_from_container(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc1 = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container_client1 = self._create_container(self.container_name, bsc1) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} # Set metadata to check against later container_client1.set_container_metadata(metadata) @@ -103,12 +102,13 @@ def test_get_container_client_from_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container_client1 = self._create_container(self.container_name, bsc) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} # Set metadata to check against later container_client1.set_container_metadata(metadata) @@ -141,13 +141,15 @@ def test_set_container_metadata_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container.set_container_metadata(metadata, if_modified_since=test_datetime) # Assert @@ -163,14 +165,16 @@ def test_set_container_metadata_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = 
self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container.set_container_metadata(metadata, if_modified_since=test_datetime) # Assert @@ -185,18 +189,20 @@ def test_set_container_acl_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} container.set_container_access_policy(signed_identifiers, if_modified_since=test_datetime) # Assert @@ -212,18 +218,20 @@ def test_set_container_acl_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + 
signed_identifiers = {"testid": access_policy} with pytest.raises(ResourceModifiedError) as e: container.set_container_access_policy(signed_identifiers, if_modified_since=test_datetime) @@ -239,18 +247,20 @@ def test_set_container_acl_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} container.set_container_access_policy(signed_identifiers, if_unmodified_since=test_datetime) # Assert @@ -266,18 +276,20 @@ def test_set_container_acl_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} with pytest.raises(ResourceModifiedError) as e: container.set_container_access_policy(signed_identifiers, if_unmodified_since=test_datetime) @@ -293,11 +305,13 @@ def test_lease_container_acquire_with_if_modified(self, **kwargs): storage_account_key = 
kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act lease = container.acquire_lease(lease_id=test_lease_id, if_modified_since=test_datetime) @@ -312,11 +326,13 @@ def test_lease_container_acquire_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act with pytest.raises(ResourceModifiedError) as e: @@ -334,11 +350,13 @@ def test_lease_container_acquire_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act lease = container.acquire_lease(lease_id=test_lease_id, if_unmodified_since=test_datetime) @@ -353,11 +371,13 @@ def test_lease_container_acquire_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() 
-                         timedelta(minutes=15))
+        test_lease_id = "00000000-1111-2222-3333-444444444444"
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -375,10 +395,12 @@ def test_delete_container_with_if_modified(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
         container = self._create_container(self.container_name, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         deleted = container.delete_container(if_modified_since=test_datetime)
@@ -397,10 +419,12 @@ def test_delete_container_with_if_modified_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
         container = self._create_container(self.container_name, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -418,10 +442,12 @@ def test_delete_container_with_if_unmodified(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
         container = self._create_container(self.container_name, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         container.delete_container(if_unmodified_since=test_datetime)
@@ -439,10 +465,12 @@ def test_delete_container_with_if_unmodified_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
         container = self._create_container(self.container_name, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -462,20 +490,20 @@ def test_multi_put_block_contains_headers(self, **kwargs):
 
         def _validate_headers(request):
             counter.append(request)
-            header = request.http_request.headers.get('x-custom-header')
-            assert header == 'test_value'
+            header = request.http_request.headers.get("x-custom-header")
+            assert header == "test_value"
 
         bsc = BlobServiceClient(
-            self.account_url(storage_account_name, "blob"), storage_account_key, max_single_put_size=100, max_block_size=50)
+            self.account_url(storage_account_name, "blob"),
+            storage_account_key,
+            max_single_put_size=100,
+            max_block_size=50,
+        )
         self._setup()
         data = self.get_random_bytes(2 * 100)
         self._create_container(self.container_name, bsc)
         blob = bsc.get_blob_client(self.container_name, "blob1")
-        blob.upload_blob(
-            data,
-            headers={'x-custom-header': 'test_value'},
-            raw_request_hook=_validate_headers
-        )
+        blob.upload_blob(data, headers={"x-custom-header": "test_value"}, raw_request_hook=_validate_headers)
 
         assert len(counter) == 5
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -485,18 +513,19 @@ def test_put_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         resp = blob.upload_blob(data, length=len(data), if_modified_since=test_datetime)
 
         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
 
         return variables
@@ -507,12 +536,13 @@ def test_put_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -530,18 +560,19 @@ def test_put_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         resp = blob.upload_blob(data, length=len(data), if_unmodified_since=test_datetime)
 
         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
 
         return variables
@@ -552,12 +583,13 @@ def test_put_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -574,18 +606,19 @@ def test_put_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
         etag = blob.get_blob_properties().etag
 
         # Act
         resp = blob.upload_blob(data, length=len(data), etag=etag, match_condition=MatchConditions.IfNotModified)
 
         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
 
         with pytest.raises(ValueError):
             blob.upload_blob(data, length=len(data), etag=etag)
@@ -598,20 +631,22 @@ def test_put_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
             blob.upload_blob(
                 data,
                 length=len(data),
-                etag='0x111111111111111',
+                etag="0x111111111111111",
                 match_condition=MatchConditions.IfNotModified,
-                overwrite=True)
+                overwrite=True,
+            )
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -622,19 +657,22 @@ def test_put_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
 
         # Act
-        resp = blob.upload_blob(data, length=len(data), etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        resp = blob.upload_blob(
+            data, length=len(data), etag="0x111111111111111", match_condition=MatchConditions.IfModified
+        )
 
         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
 
         with pytest.raises(ValueError):
-            blob.upload_blob(data, length=len(data), etag='0x111111111111111')
+            blob.upload_blob(data, length=len(data), etag="0x111111111111111")
         with pytest.raises(ValueError):
             blob.upload_blob(data, length=len(data), match_condition=MatchConditions.IfModified)
@@ -644,16 +682,19 @@ def test_put_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        data = b'hello world'
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', data, bsc)
+        data = b"hello world"
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", data, bsc)
         etag = blob.get_blob_properties().etag
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob.upload_blob(data, length=len(data), etag=etag, match_condition=MatchConditions.IfModified, overwrite=True)
+            blob.upload_blob(
+                data, length=len(data), etag=etag, match_condition=MatchConditions.IfModified, overwrite=True
+            )
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -665,17 +706,18 @@ def test_get_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         content = blob.download_blob(if_modified_since=test_datetime).readall()
 
         # Assert
-        assert content == b'hello world'
+        assert content == b"hello world"
 
         return variables
@@ -686,11 +728,12 @@ def test_get_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -708,17 +751,18 @@ def test_get_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         content = blob.download_blob(if_unmodified_since=test_datetime).readall()
 
         # Assert
-        assert content == b'hello world'
+        assert content == b"hello world"
 
         return variables
@@ -729,11 +773,12 @@ def test_get_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
@@ -750,17 +795,18 @@ def test_get_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
         etag = blob.get_blob_properties().etag
 
         # Act
         content = blob.download_blob(etag=etag, match_condition=MatchConditions.IfNotModified).readall()
 
         # Assert
-        assert content == b'hello world'
+        assert content == b"hello world"
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -768,14 +814,15 @@ def test_get_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob.download_blob(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -786,16 +833,17 @@ def test_get_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        content = blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfModified).readall()
+        content = blob.download_blob(etag="0x111111111111111", match_condition=MatchConditions.IfModified).readall()
 
         # Assert
-        assert content == b'hello world'
+        assert content == b"hello world"
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -803,10 +851,11 @@ def test_get_blob_with_if_none_match_fail(self, **kwargs):
        storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -823,16 +872,15 @@ def test_set_blob_properties_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         blob.set_http_headers(content_settings, if_modified_since=test_datetime)
 
         # Assert
@@ -849,17 +897,16 @@ def test_set_blob_properties_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.set_http_headers(content_settings, if_modified_since=test_datetime)
 
         # Assert
@@ -874,16 +921,15 @@ def test_set_blob_properties_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         blob.set_http_headers(content_settings, if_unmodified_since=test_datetime)
 
         # Assert
@@ -900,17 +946,16 @@ def test_set_blob_properties_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.set_http_headers(content_settings, if_unmodified_since=test_datetime)
 
         # Assert
@@ -925,17 +970,18 @@ def test_get_properties_last_access_time(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key,
-                                connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         lat = blob.get_blob_properties().last_accessed_on
         self.sleep(5)
 
         # Act
-        blob.stage_block(block_id='1', data="this is test content")
-        blob.commit_block_list(['1'])
+        blob.stage_block(block_id="1", data="this is test content")
+        blob.commit_block_list(["1"])
         new_lat = blob.get_blob_properties().last_accessed_on
 
         # Assert
@@ -950,17 +996,16 @@ def test_set_blob_properties_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
         blob.set_http_headers(content_settings, etag=etag, match_condition=MatchConditions.IfNotModified)
 
         # Assert
@@ -974,18 +1019,19 @@ def test_set_blob_properties_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            blob.set_http_headers(content_settings, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            blob.set_http_headers(
+                content_settings, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified
+            )
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -996,17 +1042,16 @@ def test_set_blob_properties_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        blob.set_http_headers(content_settings, etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        blob.set_http_headers(content_settings, etag="0x111111111111111", match_condition=MatchConditions.IfModified)
 
         # Assert
         properties = blob.get_blob_properties()
@@ -1019,18 +1064,17 @@ def test_set_blob_properties_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
             blob.set_http_headers(content_settings, etag=etag, match_condition=MatchConditions.IfModified)
 
         # Assert
@@ -1043,20 +1087,21 @@ def test_get_blob_properties_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         properties = blob.get_blob_properties(if_modified_since=test_datetime)
 
         # Assert
         assert isinstance(properties, BlobProperties)
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"
 
         return variables
@@ -1066,16 +1111,19 @@ def test_if_blob_exists_vid(self, **kwargs):
         versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
         versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(versioned_storage_account_name, "blob"),
+            versioned_storage_account_key,
+            connection_data_block_size=4 * 1024,
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         old_blob_version_id = blob.get_blob_properties().get("version_id")
         assert old_blob_version_id is not None
-        blob.stage_block(block_id='1', data="this is test content")
-        blob.commit_block_list(['1'])
+        blob.stage_block(block_id="1", data="this is test content")
+        blob.commit_block_list(["1"])
         new_blob_version_id = blob.get_blob_properties().get("version_id")
 
         # Assert
@@ -1085,10 +1133,10 @@ def test_if_blob_exists_vid(self, **kwargs):
 
         # Act
         test_snapshot = blob.create_snapshot()
-        blob_snapshot = bsc.get_blob_client(self.container_name, 'blob1', snapshot=test_snapshot)
+        blob_snapshot = bsc.get_blob_client(self.container_name, "blob1", snapshot=test_snapshot)
         assert blob_snapshot.exists()
-        blob.stage_block(block_id='1', data="this is additional test content")
-        blob.commit_block_list(['1'])
+        blob.stage_block(block_id="1", data="this is additional test content")
+        blob.commit_block_list(["1"])
 
         # Assert
         assert blob_snapshot.exists()
@@ -1102,8 +1150,11 @@ def test_if_blob_with_cpk_exists(self, **kwargs):
 
         container_name = self.get_resource_name("testcontainer1")
         cc = ContainerClient(
-            self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name=container_name,
-            connection_data_block_size=4 * 1024)
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key,
+            container_name=container_name,
+            connection_data_block_size=4 * 1024,
+        )
         cc.create_container()
         self._setup()
         test_cpk = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH)
@@ -1119,14 +1170,15 @@ def test_get_blob_properties_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.get_blob_properties(if_modified_since=test_datetime)
 
         # Assert
@@ -1141,20 +1193,21 @@ def test_get_blob_properties_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         properties = blob.get_blob_properties(if_unmodified_since=test_datetime)
 
         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"
 
         return variables
@@ -1165,14 +1218,15 @@ def test_get_blob_properties_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.get_blob_properties(if_unmodified_since=test_datetime)
 
         # Assert
@@ -1186,11 +1240,12 @@ def test_get_blob_properties_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1198,9 +1253,9 @@ def test_get_blob_properties_with_if_match(self, **kwargs):
 
         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1208,15 +1263,16 @@ def test_get_blob_properties_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1227,20 +1283,21 @@ def test_get_blob_properties_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        properties = blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        properties = blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfModified)
 
         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1248,11 +1305,12 @@ def test_get_blob_properties_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1269,14 +1327,15 @@ def test_get_blob_metadata_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         md = blob.get_blob_properties(if_modified_since=test_datetime).metadata
 
         # Assert
@@ -1291,15 +1350,16 @@ def test_get_blob_metadata_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.get_blob_properties(if_modified_since=test_datetime).metadata
 
         # Assert
@@ -1314,14 +1374,15 @@ def test_get_blob_metadata_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         md = blob.get_blob_properties(if_unmodified_since=test_datetime).metadata
 
         # Assert
@@ -1336,15 +1397,16 @@ def test_get_blob_metadata_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.get_blob_properties(if_unmodified_since=test_datetime).metadata
 
         # Assert
@@ -1358,11 +1420,12 @@ def test_get_blob_metadata_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1377,15 +1440,16 @@ def test_get_blob_metadata_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified).metadata
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified).metadata
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1396,14 +1460,15 @@ def test_get_blob_metadata_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        md = blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfModified).metadata
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        md = blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfModified).metadata
 
         # Assert
         assert md is not None
@@ -1414,11 +1479,12 @@ def test_get_blob_metadata_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1435,15 +1501,16 @@ def test_set_blob_metadata_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         blob.set_blob_metadata(metadata, if_modified_since=test_datetime)
 
         # Assert
@@ -1459,16 +1526,17 @@ def test_set_blob_metadata_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
            blob.set_blob_metadata(metadata, if_modified_since=test_datetime)
 
         # Assert
@@ -1483,15 +1551,16 @@ def test_set_blob_metadata_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
 
         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         blob.set_blob_metadata(metadata, if_unmodified_since=test_datetime)
 
         # Assert
@@ -1507,16 +1576,17 @@ def test_set_blob_metadata_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             blob.set_blob_metadata(metadata, if_unmodified_since=test_datetime)
 
         # Assert
@@ -1530,15 +1600,16 @@ def test_set_blob_metadata_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
-        metadata = {'hello': 'world', 'number': '42'}
+        metadata = {"hello": "world", "number": "42"}
         blob.set_blob_metadata(metadata, etag=etag, match_condition=MatchConditions.IfNotModified)
 
         # Assert
@@ -1551,16 +1622,17 @@ def test_set_blob_metadata_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            blob.set_blob_metadata(metadata, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            blob.set_blob_metadata(metadata, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1571,15 +1643,16 @@ def test_set_blob_metadata_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        blob.set_blob_metadata(metadata, etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        blob.set_blob_metadata(metadata, etag="0x111111111111111", match_condition=MatchConditions.IfModified)
 
         # Assert
         md = blob.get_blob_properties().metadata
@@ -1591,16 +1664,17 @@ def test_set_blob_metadata_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
+            metadata = {"hello": "world", "number": "42"}
             blob.set_blob_metadata(metadata, etag=etag, match_condition=MatchConditions.IfModified)
 
         # Assert
@@ -1613,14 +1687,15 @@ def test_delete_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = blob.delete_blob(if_modified_since=test_datetime)
 
         # Assert
@@ -1635,14 +1710,15 @@ def test_delete_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             blob.delete_blob(if_modified_since=test_datetime)
 
@@ -1658,14 +1734,15 @@ def test_delete_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = blob.delete_blob(if_unmodified_since=test_datetime)
 
         # Assert
@@ -1680,14 +1757,15 @@ def test_delete_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             blob.delete_blob(if_unmodified_since=test_datetime)
 
@@ -1702,11 +1780,12 @@ def test_delete_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1722,15 +1801,16 @@ def test_delete_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
-            blob.delete_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob.delete_blob(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)
 
         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1741,14 +1821,15 @@ def test_delete_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        resp = blob.delete_blob(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        resp = blob.delete_blob(etag="0x111111111111111", match_condition=MatchConditions.IfModified)
 
         # Assert
         assert resp is None
@@ -1759,11 +1840,12 @@ def test_delete_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = blob.get_blob_properties().etag
 
         # Act
@@ -1780,19 +1862,20 @@ def test_snapshot_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
 
         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = blob.create_snapshot(if_modified_since=test_datetime)
 
         # Assert
         assert resp is not None
-        assert resp['snapshot'] is not None
+        assert resp["snapshot"] is not None
 
         return
variables @@ -1803,15 +1886,16 @@ def test_snapshot_blob_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") blob.create_snapshot(if_modified_since=test_datetime) # Assert @@ -1826,19 +1910,20 @@ def test_snapshot_blob_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") resp = blob.create_snapshot(if_unmodified_since=test_datetime) # Assert assert resp is not None - assert resp['snapshot'] is not None + assert resp["snapshot"] is not None return variables @@ -1849,15 +1934,16 @@ def test_snapshot_blob_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") blob.create_snapshot(if_unmodified_since=test_datetime) # Assert @@ -1871,11 +1957,12 @@ def test_snapshot_blob_with_if_match(self, **kwargs): storage_account_name = 
kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag # Act @@ -1883,7 +1970,7 @@ def test_snapshot_blob_with_if_match(self, **kwargs): # Assert assert resp is not None - assert resp['snapshot'] is not None + assert resp["snapshot"] is not None @BlobPreparer() @recorded_by_proxy @@ -1891,15 +1978,16 @@ def test_snapshot_blob_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) # Act with pytest.raises(ResourceModifiedError) as e: - blob = bsc.get_blob_client(self.container_name, 'blob1') - blob.create_snapshot(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + blob = bsc.get_blob_client(self.container_name, "blob1") + blob.create_snapshot(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -1910,18 +1998,19 @@ def test_snapshot_blob_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') - resp = blob.create_snapshot(etag='0x111111111111111', match_condition=MatchConditions.IfModified) + blob = bsc.get_blob_client(self.container_name, "blob1") + resp = blob.create_snapshot(etag="0x111111111111111", match_condition=MatchConditions.IfModified) # Assert assert resp is not None - assert resp['snapshot'] is not None + assert resp["snapshot"] is not None @BlobPreparer() @recorded_by_proxy @@ -1929,11 +2018,12 @@ def test_snapshot_blob_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + 
self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag # Act @@ -1950,18 +2040,17 @@ def test_lease_blob_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') - lease = blob.acquire_lease( - if_modified_since=test_datetime, - lease_id=test_lease_id) + blob = bsc.get_blob_client(self.container_name, "blob1") + lease = blob.acquire_lease(if_modified_since=test_datetime, lease_id=test_lease_id) lease.break_lease() @@ -1978,16 +2067,17 @@ def test_lease_blob_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") blob.acquire_lease(lease_id=test_lease_id, if_modified_since=test_datetime) # Assert @@ -2002,18 +2092,17 @@ def test_lease_blob_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 
'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') - lease = blob.acquire_lease( - if_unmodified_since=test_datetime, - lease_id=test_lease_id) + blob = bsc.get_blob_client(self.container_name, "blob1") + lease = blob.acquire_lease(if_unmodified_since=test_datetime, lease_id=test_lease_id) lease.break_lease() @@ -2030,15 +2119,16 @@ def test_lease_blob_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: blob.acquire_lease(lease_id=test_lease_id, if_unmodified_since=test_datetime) @@ -2053,18 +2143,17 @@ def test_lease_blob_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act - lease = blob.acquire_lease( - lease_id=test_lease_id, - etag=etag, match_condition=MatchConditions.IfNotModified) + lease = blob.acquire_lease(lease_id=test_lease_id, etag=etag, match_condition=MatchConditions.IfNotModified) lease.break_lease() @@ -2080,16 +2169,19 @@ def test_lease_blob_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = 
BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: - blob.acquire_lease(lease_id=test_lease_id, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + blob.acquire_lease( + lease_id=test_lease_id, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2100,18 +2192,18 @@ def test_lease_blob_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") lease = blob.acquire_lease( - lease_id=test_lease_id, - etag='0x111111111111111', - match_condition=MatchConditions.IfModified) + lease_id=test_lease_id, etag="0x111111111111111", match_condition=MatchConditions.IfModified + ) lease.break_lease() @@ -2125,13 +2217,14 @@ def test_lease_blob_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag - test_lease_id = '00000000-1111-2222-3333-444444444444' + test_lease_id = "00000000-1111-2222-3333-444444444444" # Act with pytest.raises(ResourceModifiedError) as e: @@ -2147,22 +2240,23 @@ def test_put_block_list_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = 
self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list, if_modified_since=test_datetime) # Assert content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" return variables @@ -2172,22 +2266,25 @@ def test_put_block_list_returns_vid(self, **kwargs): versioned_storage_account_name = kwargs.pop("versioned_storage_account_name") versioned_storage_account_key = kwargs.pop("versioned_storage_account_key") - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + versioned_storage_account_key, + connection_data_block_size=4 * 1024, + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] resp = blob.commit_block_list(block_list) # Assert - assert resp['version_id'] is not None + assert resp["version_id"] is not None content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" @BlobPreparer() @recorded_by_proxy @@ -2195,23 +2292,24 @@ def test_put_block_list_with_metadata(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - metadata = {'hello': 'world', 'number': '43'} - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + metadata = {"hello": "world", "number": "43"} + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), 
BlobBlock(block_id="3")] blob.commit_block_list(block_list, metadata=metadata) # Assert content = blob.download_blob() properties = blob.get_blob_properties() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" assert properties.metadata == metadata @BlobPreparer() @@ -2221,20 +2319,22 @@ def test_put_block_list_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')], - if_modified_since=test_datetime) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + if_modified_since=test_datetime, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2248,22 +2348,23 @@ def test_put_block_list_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list, if_unmodified_since=test_datetime) # Assert content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" return variables @@ -2274,20 +2375,22 @@ def test_put_block_list_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, 
connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')], - if_unmodified_since=test_datetime) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + if_unmodified_since=test_datetime, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2300,22 +2403,23 @@ def test_put_block_list_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") etag = blob.get_blob_properties().etag # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list, etag=etag, match_condition=MatchConditions.IfNotModified) # Assert content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" @BlobPreparer() @recorded_by_proxy @@ -2323,19 +2427,22 @@ def test_put_block_list_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act with pytest.raises(ResourceModifiedError) as e: blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), 
BlobBlock(block_id='3')], - etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2346,21 +2453,22 @@ def test_put_block_list_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - blob.commit_block_list(block_list, etag='0x111111111111111', match_condition=MatchConditions.IfModified) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + blob.commit_block_list(block_list, etag="0x111111111111111", match_condition=MatchConditions.IfModified) # Assert content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' + assert content.readall() == b"AAABBBCCC" @BlobPreparer() @recorded_by_proxy @@ -2368,18 +2476,19 @@ def test_put_block_list_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + container, blob = self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") etag = blob.get_blob_properties().etag # Act with pytest.raises(ResourceModifiedError) as e: - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list, etag=etag, match_condition=MatchConditions.IfModified) # Assert @@ -2392,15 +2501,16 @@ def test_update_page_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 
1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") blob.upload_page(data, offset=0, length=512, if_modified_since=test_datetime) return variables @@ -2412,15 +2522,16 @@ def test_update_page_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: blob.upload_page(data, offset=0, length=512, if_modified_since=test_datetime) @@ -2436,15 +2547,16 @@ def test_update_page_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") blob.upload_page(data, offset=0, length=512, if_unmodified_since=test_datetime) return variables @@ -2456,15 +2568,16 @@ def test_update_page_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', 
datetime.utcnow() - timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: blob.upload_page(data, offset=0, length=512, if_unmodified_since=test_datetime) @@ -2479,12 +2592,13 @@ def test_update_page_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag # Act @@ -2498,16 +2612,19 @@ def test_update_page_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: - blob.upload_page(data, offset=0, length=512, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + blob.upload_page( + data, offset=0, length=512, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2518,15 +2635,18 @@ def test_update_page_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') - blob.upload_page(data, offset=0, length=512, etag='0x111111111111111', match_condition=MatchConditions.IfModified) + blob = 
bsc.get_blob_client(self.container_name, "blob1") + blob.upload_page( + data, offset=0, length=512, etag="0x111111111111111", match_condition=MatchConditions.IfModified + ) # Assert @@ -2536,12 +2656,13 @@ def test_update_page_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 - blob = bsc.get_blob_client(self.container_name, 'blob1') + self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 + blob = bsc.get_blob_client(self.container_name, "blob1") etag = blob.get_blob_properties().etag # Act @@ -2558,12 +2679,13 @@ def test_get_page_ranges_iter_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) @@ -2572,8 +2694,8 @@ def test_get_page_ranges_iter_with_if_modified(self, **kwargs): # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} return variables @@ -2584,12 +2706,13 @@ def test_get_page_ranges_iter_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) @@ -2609,12 +2732,13 @@ def 
test_get_page_ranges_iter_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) @@ -2623,8 +2747,8 @@ def test_get_page_ranges_iter_with_if_unmodified(self, **kwargs): # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} return variables @@ -2635,12 +2759,13 @@ def test_get_page_ranges_iter_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) @@ -2659,11 +2784,12 @@ def test_get_page_ranges_iter_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) etag = blob.get_blob_properties().etag @@ -2673,8 +2799,8 @@ def test_get_page_ranges_iter_with_if_match(self, **kwargs): # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + 
assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} @BlobPreparer() @recorded_by_proxy @@ -2682,17 +2808,18 @@ def test_get_page_ranges_iter_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) # Act with pytest.raises(ResourceModifiedError) as e: - blob.get_page_ranges(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + blob.get_page_ranges(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2703,21 +2830,22 @@ def test_get_page_ranges_iter_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) # Act - ranges = blob.get_page_ranges(etag='0x111111111111111', match_condition=MatchConditions.IfModified) + ranges = blob.get_page_ranges(etag="0x111111111111111", match_condition=MatchConditions.IfModified) # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} @BlobPreparer() @recorded_by_proxy @@ -2725,11 +2853,12 @@ def test_get_page_ranges_iter_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 + container, blob = self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 blob.upload_page(data, offset=0, length=512) blob.upload_page(data, offset=1024, length=512) @@ -2749,18 +2878,20 @@ def 
test_append_block_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), if_modified_since=test_datetime) + resp = blob.append_block("block {0}".format(i), if_modified_since=test_datetime) assert resp is not None # Assert content = blob.download_blob().readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content return variables @@ -2771,14 +2902,16 @@ def test_append_block_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), if_modified_since=test_datetime) + resp = blob.append_block("block {0}".format(i), if_modified_since=test_datetime) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2792,18 +2925,20 @@ def test_append_block_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), if_unmodified_since=test_datetime) + resp = blob.append_block("block {0}".format(i), if_unmodified_since=test_datetime) assert resp is not None # Assert 
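# --- Reviewer note (not part of this diff): the hunks around here all drive
# the Last-Modified gate on append_block. A minimal sketch of that pattern
# outside the test harness; the account URL, key, container and blob names
# below are placeholders, not values from this PR.
from datetime import datetime, timedelta

from azure.core.exceptions import ResourceModifiedError
from azure.storage.blob import BlobServiceClient

bsc = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")
blob = bsc.get_blob_client("my-container", "my-append-blob")
blob.create_append_blob()  # conditional append requires an existing append blob

cutoff = datetime.utcnow() - timedelta(minutes=15)
try:
    # if_modified_since: proceed only if the blob changed after `cutoff`
    # (HTTP If-Modified-Since); if_unmodified_since is the inverse gate.
    blob.append_block(b"payload", if_modified_since=cutoff)
except ResourceModifiedError:
    # HTTP 412: the precondition failed and nothing was appended.
    pass
# --- end reviewer note ---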
content = blob.download_blob().readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content return variables @@ -2814,14 +2949,16 @@ def test_append_block_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), if_unmodified_since=test_datetime) + resp = blob.append_block("block {0}".format(i), if_unmodified_since=test_datetime) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2834,19 +2971,21 @@ def test_append_block_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act for i in range(5): etag = blob.get_blob_properties().etag - resp = blob.append_block(u'block {0}'.format(i), etag=etag, match_condition=MatchConditions.IfNotModified) + resp = blob.append_block("block {0}".format(i), etag=etag, match_condition=MatchConditions.IfNotModified) assert resp is not None # Assert content = blob.download_blob().readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content @BlobPreparer() @recorded_by_proxy @@ -2854,14 +2993,18 @@ def test_append_block_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act with pytest.raises(HttpResponseError) as e: for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + resp = blob.append_block( + "block {0}".format(i), etag="0x111111111111111", match_condition=MatchConditions.IfNotModified + ) @BlobPreparer() 
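The `etag` / `match_condition` pairs being reflowed throughout these hunks follow the azure-core convention: `MatchConditions.IfNotModified` sends the ETag as `If-Match` (proceed only if the resource is unchanged), while `MatchConditions.IfModified` sends it as `If-None-Match`. A short optimistic-concurrency sketch under the same placeholder-credentials assumption as the previous example:

```python
from azure.core import MatchConditions
from azure.core.exceptions import ResourceModifiedError
from azure.storage.blob import BlobClient

# Placeholder values -- not taken from this PR.
blob = BlobClient(
    "https://<account>.blob.core.windows.net",
    container_name="demo-container",
    blob_name="appendblob1",
    credential="<account-key>",
)

etag = blob.get_blob_properties().etag

# If-Match: the append succeeds only while the blob still has the ETag we read.
blob.append_block(b"more data", etag=etag, match_condition=MatchConditions.IfNotModified)

# That append rotated the ETag, so replaying with the stale value must fail.
try:
    blob.append_block(b"more data", etag=etag, match_condition=MatchConditions.IfNotModified)
except ResourceModifiedError:
    print("blob changed since we read its properties")
```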
@recorded_by_proxy @@ -2869,18 +3012,22 @@ def test_append_block_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act for i in range(5): - resp = blob.append_block(u'block {0}'.format(i), etag='0x8D2C9167D53FC2C', match_condition=MatchConditions.IfModified) + resp = blob.append_block( + "block {0}".format(i), etag="0x8D2C9167D53FC2C", match_condition=MatchConditions.IfModified + ) assert resp is not None # Assert content = blob.download_blob().readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content @BlobPreparer() @recorded_by_proxy @@ -2888,15 +3035,17 @@ def test_append_block_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): etag = blob.get_blob_properties().etag - resp = blob.append_block(u'block {0}'.format(i), etag=etag, match_condition=MatchConditions.IfModified) + resp = blob.append_block("block {0}".format(i), etag=etag, match_condition=MatchConditions.IfModified) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2908,11 +3057,13 @@ def test_append_blob_from_bytes_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) @@ -2931,11 +3082,13 @@ def test_append_blob_from_bytes_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), 
storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2953,11 +3106,13 @@ def test_append_blob_from_bytes_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) @@ -2976,11 +3131,13 @@ def test_append_blob_from_bytes_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2997,7 +3154,9 @@ def test_append_blob_from_bytes_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) @@ -3005,7 +3164,9 @@ def test_append_blob_from_bytes_with_if_match(self, **kwargs): # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified) + blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified + ) # Assert content = blob.download_blob().readall() @@ -3017,16 +3178,20 @@ def test_append_blob_from_bytes_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - 
bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_etag = '0x8D2C9167D53FC2C' + test_etag = "0x8D2C9167D53FC2C" # Act with pytest.raises(ResourceModifiedError) as e: data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified) + blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified + ) assert StorageErrorCode.condition_not_met == e.value.error_code @@ -3036,15 +3201,19 @@ def test_append_blob_from_bytes_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_etag = '0x8D2C9167D53FC2C' + test_etag = "0x8D2C9167D53FC2C" # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified) + blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified + ) # Assert content = blob.download_blob().readall() @@ -3056,7 +3225,9 @@ def test_append_blob_from_bytes_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = self._create_container_and_append_blob(self.container_name, blob_name, bsc) @@ -3065,7 +3236,9 @@ def test_append_blob_from_bytes_with_if_none_match_fail(self, **kwargs): # Act with pytest.raises(ResourceModifiedError) as e: data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified) + blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified + ) assert StorageErrorCode.condition_not_met == e.value.error_code @@ -3076,13 +3249,13 @@ def test_header_metadata_sort_in_upload_blob_fails(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = 
container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Relevant ASCII characters (excluding 'Bad Request' values) ascii_subset = "!#$%&*+.-^_~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz|~" @@ -3090,7 +3263,7 @@ def test_header_metadata_sort_in_upload_blob_fails(self, **kwargs): # Build out metadata metadata = dict() for c in ascii_subset: - metadata[c] = 'a' + metadata[c] = "a" # Act # If we hit invalid metadata error, that means we have successfully sorted headers properly to pass auth error @@ -3107,19 +3280,44 @@ def test_header_metadata_sort_in_upload_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Hand-picked metadata examples as Python & service don't sort '_' with the same weight - metadata = {'a0': 'a', 'a1': 'a', 'a2': 'a', 'a3': 'a', 'a4': 'a', 'a5': 'a', 'a6': 'a', 'a7': 'a', 'a8': 'a', - 'a9': 'a', '_': 'a', '_a': 'a', 'a_': 'a', '__': 'a', '_a_': 'a', 'b': 'a', 'c': 'a', 'y': 'a', - 'z': 'z_', '_z': 'a', '_F': 'a', 'F': 'a', 'F_': 'a', '_F_': 'a', '__F': 'a', '__a': 'a', 'a__': 'a' - } + metadata = { + "a0": "a", + "a1": "a", + "a2": "a", + "a3": "a", + "a4": "a", + "a5": "a", + "a6": "a", + "a7": "a", + "a8": "a", + "a9": "a", + "_": "a", + "_a": "a", + "a_": "a", + "__": "a", + "_a_": "a", + "b": "a", + "c": "a", + "y": "a", + "z": "z_", + "_z": "a", + "_F": "a", + "F": "a", + "F_": "a", + "_F_": "a", + "__F": "a", + "__a": "a", + "a__": "a", + } # Act blob_client.upload_blob(data, length=len(data), metadata=metadata) @@ -3131,30 +3329,30 @@ def test_header_metadata_sort_in_upload_blob_translation(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Hand-picked metadata examples that sorted incorrectly with our previous implementation. 
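For context on why these keys are hand-picked: each metadata entry travels as an `x-ms-meta-<name>` header, and SharedKey signing requires the client to order its canonicalized headers exactly as the service does, which is where `-` (0x2D), `_` (0x5F), and alphanumerics diverge under a comparator that weighs them differently. A deliberately simplified illustration of the canonicalization step (not the SDK's actual signing code):

```python
# Simplified sketch of canonicalized x-ms-* header ordering for SharedKey
# signing; the SDK's real implementation lives in its authentication policy.
metadata = {"test-a": "val", "test_a": "val", "test--": "val", "test__": "val"}

headers = {f"x-ms-meta-{key}": value for key, value in metadata.items()}

# Names are lowercased and sorted byte-wise, so '-' orders before '_'; a sort
# that weighs these characters differently produces the wrong string to sign.
canonicalized = "\n".join(f"{name}:{headers[name]}" for name in sorted(headers))
print(canonicalized)
```

If client and service disagree on this ordering, the request fails authentication, which is why reaching the invalid-metadata error in the `_fails` test above counts as success: the signature was accepted first.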
metadata = { - 'test': 'val', - 'test-': 'val', - 'test--': 'val', - 'test-_': 'val', - 'test_-': 'val', - 'test__': 'val', - 'test-a': 'val', - 'test-A': 'val', - 'test-_A': 'val', - 'test_a': 'val', - 'test_Z': 'val', - 'test_a_': 'val', - 'test_a-': 'val', - 'test_a-_': 'val', + "test": "val", + "test-": "val", + "test--": "val", + "test-_": "val", + "test_-": "val", + "test__": "val", + "test-a": "val", + "test-A": "val", + "test-_A": "val", + "test_a": "val", + "test_Z": "val", + "test_a_": "val", + "test_a-": "val", + "test_a-_": "val", } # Act @@ -3165,4 +3363,5 @@ def test_header_metadata_sort_in_upload_blob_translation(self, **kwargs): # Assert assert StorageErrorCode.invalid_metadata == e.value.error_code + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions_async.py index ac2a864851ed..eeebfc706cfa 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_access_conditions_async.py @@ -40,7 +40,7 @@ class TestStorageBlobAccessConditionsAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") async def _create_container(self, container_name, bsc): container = bsc.get_container_client(container_name) @@ -51,7 +51,7 @@ async def _create_container_and_block_blob(self, container_name, blob_name, blob container = await self._create_container(container_name, bsc) blob = bsc.get_blob_client(container_name, blob_name) resp = await blob.upload_blob(blob_data, length=len(blob_data)) - assert resp.get('etag') is not None + assert resp.get("etag") is not None return container, blob async def _create_container_and_page_blob(self, container_name, blob_name, content_length, bsc): @@ -73,12 +73,13 @@ async def test_get_blob_service_client_from_container(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc1 = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container_client1 = await self._create_container(self.container_name, bsc1) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} # Set metadata to check against later await container_client1.set_container_metadata(metadata) @@ -106,12 +107,13 @@ async def test_get_container_client_from_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container_client1 = await self._create_container(self.container_name, bsc) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} # Set metadata to check against later await container_client1.set_container_metadata(metadata) @@ -148,13 +150,15 @@ async def test_set_container_metadata_with_if_modified(self, **kwargs): storage_account_key = 
kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} await container.set_container_metadata(metadata, if_modified_since=test_datetime) # Assert @@ -170,14 +174,16 @@ async def test_set_container_md_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} await container.set_container_metadata(metadata, if_modified_since=test_datetime) # Assert @@ -192,18 +198,20 @@ async def test_set_container_acl_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} await container.set_container_access_policy(signed_identifiers, if_modified_since=test_datetime) # Assert @@ -219,18 +227,20 @@ async def test_set_container_acl_with_if_modified_fail(self, **kwargs): 
storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} with pytest.raises(ResourceModifiedError) as e: await container.set_container_access_policy(signed_identifiers, if_modified_since=test_datetime) @@ -246,18 +256,20 @@ async def test_set_container_acl_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} await container.set_container_access_policy(signed_identifiers, if_unmodified_since=test_datetime) # Assert @@ -266,7 +278,6 @@ async def test_set_container_acl_with_if_unmodified(self, **kwargs): return variables - @BlobPreparer() @recorded_by_proxy_async async def test_set_container_acl_with_if_unmodified_fail(self, **kwargs): @@ -274,18 +285,20 @@ async def test_set_container_acl_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = 
kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) # Act - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} with pytest.raises(ResourceModifiedError) as e: await container.set_container_access_policy(signed_identifiers, if_unmodified_since=test_datetime) @@ -301,11 +314,13 @@ async def test_lease_container_acquire_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act lease = await container.acquire_lease(lease_id=test_lease_id, if_modified_since=test_datetime) @@ -320,11 +335,13 @@ async def test_lease_cont_acquire_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -342,11 +359,13 @@ async def test_lease_container_acquire_with_if_unmodified(self, **kwargs): storage_account_key = 
kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act lease = await container.acquire_lease(lease_id=test_lease_id, if_unmodified_since=test_datetime) @@ -361,11 +380,13 @@ async def test_lease_container_acquire_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_lease_id = '00000000-1111-2222-3333-444444444444' - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_lease_id = "00000000-1111-2222-3333-444444444444" + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -383,10 +404,12 @@ async def test_delete_container_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act deleted = await container.delete_container(if_modified_since=test_datetime) @@ -404,10 +427,12 @@ async def test_delete_container_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: await container.delete_container(if_modified_since=test_datetime) @@ 
-424,10 +449,12 @@ async def test_delete_container_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act await container.delete_container(if_unmodified_since=test_datetime) @@ -444,10 +471,12 @@ async def test_delete_container_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() container = await self._create_container(self.container_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: await container.delete_container(if_unmodified_since=test_datetime) @@ -466,20 +495,20 @@ async def test_multi_put_block_contains_headers(self, **kwargs): def _validate_headers(request): counter.append(request) - header = request.http_request.headers.get('x-custom-header') - assert header == 'test_value' + header = request.http_request.headers.get("x-custom-header") + assert header == "test_value" bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key, max_single_put_size=100, max_block_size=50) + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_put_size=100, + max_block_size=50, + ) self._setup() data = self.get_random_bytes(2 * 100) await self._create_container(self.container_name, bsc) blob = bsc.get_blob_client(self.container_name, "blob1") - await blob.upload_blob( - data, - headers={'x-custom-header': 'test_value'}, - raw_request_hook=_validate_headers - ) + await blob.upload_blob(data, headers={"x-custom-header": "test_value"}, raw_request_hook=_validate_headers) assert len(counter) == 5 @BlobPreparer() @@ -489,18 +518,19 @@ async def test_put_blob_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + data = b"hello world" + container, blob = await 
self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act resp = await blob.upload_blob(data, length=len(data), if_modified_since=test_datetime) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None return variables @@ -511,12 +541,13 @@ async def test_put_blob_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -534,18 +565,19 @@ async def test_put_blob_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act resp = await blob.upload_blob(data, length=len(data), if_unmodified_since=test_datetime) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None return variables @@ -556,12 +588,13 @@ async def test_put_blob_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: 
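A quick arithmetic note on the `assert len(counter) == 5` in the `test_multi_put_block_contains_headers` hunk above: 200 bytes of data against `max_single_put_size=100` forces a staged upload, and `max_block_size=50` splits the payload into four Put Block requests plus one Put Block List, each of which passes through `raw_request_hook`. Sketched as plain Python:

```python
# Request count for a staged block-blob upload (ignoring retries, which would
# also pass through raw_request_hook and inflate the count).
data_len = 2 * 100          # payload size used by the test
max_single_put_size = 100   # above this, the client stages blocks
max_block_size = 50         # size of each staged block

if data_len <= max_single_put_size:
    expected_requests = 1                          # single Put Blob call
else:
    stage_blocks = -(-data_len // max_block_size)  # ceil(200 / 50) = 4
    expected_requests = stage_blocks + 1           # + Put Block List = 5

assert expected_requests == 5
```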
@@ -578,18 +611,19 @@ async def test_put_blob_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) etag = (await blob.get_blob_properties()).etag # Act resp = await blob.upload_blob(data, length=len(data), etag=etag, match_condition=MatchConditions.IfNotModified) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None @BlobPreparer() @recorded_by_proxy_async @@ -597,17 +631,22 @@ async def test_put_blob_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) # Act with pytest.raises(ResourceModifiedError) as e: await blob.upload_blob( - data, length=len(data), etag='0x111111111111111', - match_condition=MatchConditions.IfNotModified, overwrite=True) + data, + length=len(data), + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + overwrite=True, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -618,17 +657,20 @@ async def test_put_blob_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) # Act - resp = await blob.upload_blob(data, length=len(data), etag='0x111111111111111', match_condition=MatchConditions.IfModified) + resp = await blob.upload_blob( + data, length=len(data), etag="0x111111111111111", match_condition=MatchConditions.IfModified + ) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None @BlobPreparer() @recorded_by_proxy_async @@ -636,16 +678,19 @@ async def test_put_blob_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - data = b'hello world' - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', data, bsc) + data = b"hello world" + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", data, bsc) etag = (await blob.get_blob_properties()).etag # Act with pytest.raises(ResourceModifiedError) as e: - await blob.upload_blob(data, length=len(data), etag=etag, match_condition=MatchConditions.IfModified, overwrite=True) + await blob.upload_blob( + data, length=len(data), etag=etag, match_condition=MatchConditions.IfModified, overwrite=True + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -657,18 +702,19 @@ async def test_get_blob_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act content = await blob.download_blob(if_modified_since=test_datetime) content = await content.readall() # Assert - assert content == b'hello world' + assert content == b"hello world" return variables @@ -679,11 +725,12 @@ async def test_get_blob_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -701,18 +748,19 @@ async def test_get_blob_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await 
self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act content = await blob.download_blob(if_unmodified_since=test_datetime) content = await content.readall() # Assert - assert content == b'hello world' + assert content == b"hello world" return variables @@ -723,11 +771,12 @@ async def test_get_blob_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -744,10 +793,11 @@ async def test_get_blob_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) etag = (await blob.get_blob_properties()).etag # Act @@ -755,7 +805,7 @@ async def test_get_blob_with_if_match(self, **kwargs): content = await content.readall() # Assert - assert content == b'hello world' + assert content == b"hello world" @BlobPreparer() @recorded_by_proxy_async @@ -763,14 +813,15 @@ async def test_get_blob_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'hello world', bsc) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc) # Act with pytest.raises(ResourceModifiedError) as e: - await blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + await blob.download_blob(etag="0x111111111111111", 
            match_condition=MatchConditions.IfNotModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code

@@ -781,17 +832,18 @@ async def test_get_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        content = await blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        content = await blob.download_blob(etag="0x111111111111111", match_condition=MatchConditions.IfModified)
         content = await content.readall()

         # Assert
-        assert content == b'hello world'
+        assert content == b"hello world"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -799,10 +851,11 @@ async def test_get_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -819,16 +872,15 @@ async def test_set_blob_props_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         await blob.set_http_headers(content_settings, if_modified_since=test_datetime)

         # Assert
@@ -845,17 +897,16 @@ async def test_set_blob_props_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.set_http_headers(content_settings, if_modified_since=test_datetime)

         # Assert
@@ -870,16 +921,15 @@ async def test_set_blob_props_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         await blob.set_http_headers(content_settings, if_unmodified_since=test_datetime)

         # Assert
@@ -896,17 +946,16 @@ async def test_set_blob_props_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.set_http_headers(content_settings, if_unmodified_since=test_datetime)

         # Assert
@@ -920,17 +969,16 @@ async def test_set_blob_props_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
         await blob.set_http_headers(content_settings, etag=etag, match_condition=MatchConditions.IfNotModified)

         # Assert
@@ -944,18 +992,19 @@ async def test_set_blob_props_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            await blob.set_http_headers(content_settings, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            await blob.set_http_headers(
+                content_settings, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified
+            )

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -966,17 +1015,18 @@ async def test_set_blob_props_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        content_settings = ContentSettings(
-            content_language='spanish',
-            content_disposition='inline')
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        await blob.set_http_headers(content_settings, etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        await blob.set_http_headers(
+            content_settings, etag="0x111111111111111", match_condition=MatchConditions.IfModified
+        )

         # Assert
         properties = await blob.get_blob_properties()
@@ -989,18 +1039,17 @@ async def test_set_blob_props_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            content_settings = ContentSettings(
-                content_language='spanish',
-                content_disposition='inline')
+            content_settings = ContentSettings(content_language="spanish", content_disposition="inline")
             await blob.set_http_headers(content_settings, etag=etag, match_condition=MatchConditions.IfModified)

         # Assert
@@ -1013,17 +1062,20 @@ async def test_if_blob_exists_vid(self, **kwargs):
         versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
         versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")

-        bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(versioned_storage_account_name, "blob"),
+            versioned_storage_account_key,
+            connection_data_block_size=4 * 1024,
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         old_blob_props = await blob.get_blob_properties()
         old_blob_version_id = old_blob_props.get("version_id")
         assert old_blob_version_id is not None
-        await blob.stage_block(block_id='1', data="this is test content")
-        await blob.commit_block_list(['1'])
+        await blob.stage_block(block_id="1", data="this is test content")
+        await blob.commit_block_list(["1"])
         new_blob_props = await blob.get_blob_properties()
         new_blob_version_id = new_blob_props.get("version_id")
@@ -1034,10 +1086,10 @@ async def test_if_blob_exists_vid(self, **kwargs):

         # Act
         test_snapshot = await blob.create_snapshot()
-        blob_snapshot = bsc.get_blob_client(self.container_name, 'blob1', snapshot=test_snapshot)
+        blob_snapshot = bsc.get_blob_client(self.container_name, "blob1", snapshot=test_snapshot)
         assert await blob_snapshot.exists()
-        await blob.stage_block(block_id='1', data="this is additional test content")
-        await blob.commit_block_list(['1'])
+        await blob.stage_block(block_id="1", data="this is additional test content")
+        await blob.commit_block_list(["1"])

         # Assert
         assert await blob_snapshot.exists()
@@ -1051,8 +1103,11 @@ async def test_if_blob_with_cpk_exists(self, **kwargs):
         container_name = self.get_resource_name("testcontainer1")
         cc = ContainerClient(
-            self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name=container_name,
-            connection_data_block_size=4 * 1024)
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key,
+            container_name=container_name,
+            connection_data_block_size=4 * 1024,
+        )
         await cc.create_container()
         self._setup()
         test_cpk = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH)
@@ -1068,20 +1123,21 @@ async def test_get_blob_properties_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         properties = await blob.get_blob_properties(if_modified_since=test_datetime)

         # Assert
         assert isinstance(properties, BlobProperties)
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"

         return variables
@@ -1092,14 +1148,15 @@ async def test_get_blob_properties_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.get_blob_properties(if_modified_since=test_datetime)

         # Assert
@@ -1114,20 +1171,21 @@ async def test_get_blob_properties_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         properties = await blob.get_blob_properties(if_unmodified_since=test_datetime)

         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"

         return variables
@@ -1138,14 +1196,15 @@ async def test_get_blob_properties_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.get_blob_properties(if_unmodified_since=test_datetime)

         # Assert
@@ -1159,11 +1218,12 @@ async def test_get_blob_properties_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1171,9 +1231,9 @@ async def test_get_blob_properties_with_if_match(self, **kwargs):

         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1181,15 +1241,16 @@ async def test_get_blob_properties_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            await blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            await blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1200,20 +1261,23 @@ async def test_get_blob_properties_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        properties = await blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        properties = await blob.get_blob_properties(
+            etag="0x111111111111111", match_condition=MatchConditions.IfModified
+        )

         # Assert
         assert properties is not None
-        assert properties.blob_type.value == 'BlockBlob'
+        assert properties.blob_type.value == "BlockBlob"
         assert properties.size == 11
-        assert properties.lease.status == 'unlocked'
+        assert properties.lease.status == "unlocked"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1221,11 +1285,12 @@ async def test_get_blob_properties_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1242,14 +1307,15 @@ async def test_get_blob_metadata_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         md = (await blob.get_blob_properties(if_modified_since=test_datetime)).metadata

         # Assert
@@ -1264,15 +1330,16 @@ async def test_get_blob_metadata_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.get_blob_properties(if_modified_since=test_datetime)

         # Assert
@@ -1287,14 +1354,15 @@ async def test_get_blob_metadata_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         md = (await blob.get_blob_properties(if_unmodified_since=test_datetime)).metadata

         # Assert
@@ -1309,15 +1377,16 @@ async def test_get_blob_metadata_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.get_blob_properties(if_unmodified_since=test_datetime)

         # Assert
@@ -1331,11 +1400,12 @@ async def test_get_blob_metadata_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1350,15 +1420,16 @@ async def test_get_blob_metadata_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            await blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            await blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1369,14 +1440,17 @@ async def test_get_blob_metadata_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        md = (await blob.get_blob_properties(etag='0x111111111111111', match_condition=MatchConditions.IfModified)).metadata
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        md = (
+            await blob.get_blob_properties(etag="0x111111111111111", match_condition=MatchConditions.IfModified)
+        ).metadata

         # Assert
         assert md is not None
@@ -1387,11 +1461,12 @@ async def test_get_blob_metadata_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1408,15 +1483,16 @@ async def test_set_blob_metadata_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         await blob.set_blob_metadata(metadata, if_modified_since=test_datetime)

         # Assert
@@ -1432,16 +1508,17 @@ async def test_set_blob_metadata_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.set_blob_metadata(metadata, if_modified_since=test_datetime)

         # Assert
@@ -1456,15 +1533,16 @@ async def test_set_blob_metadata_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         await blob.set_blob_metadata(metadata, if_unmodified_since=test_datetime)

         # Assert
@@ -1480,16 +1558,17 @@ async def test_set_blob_metadata_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.set_blob_metadata(metadata, if_unmodified_since=test_datetime)

         # Assert
@@ -1503,15 +1582,16 @@ async def test_set_blob_metadata_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
-        metadata = {'hello': 'world', 'number': '42'}
+        metadata = {"hello": "world", "number": "42"}
         await blob.set_blob_metadata(metadata, etag=etag, match_condition=MatchConditions.IfNotModified)

         # Assert
@@ -1524,16 +1604,19 @@ async def test_set_blob_metadata_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            await blob.set_blob_metadata(metadata, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            metadata = {"hello": "world", "number": "42"}
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            await blob.set_blob_metadata(
+                metadata, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified
+            )

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1544,15 +1627,16 @@ async def test_set_blob_metadata_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        metadata = {'hello': 'world', 'number': '42'}
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        await blob.set_blob_metadata(metadata, etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        metadata = {"hello": "world", "number": "42"}
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        await blob.set_blob_metadata(metadata, etag="0x111111111111111", match_condition=MatchConditions.IfModified)

         # Assert
         md = (await blob.get_blob_properties()).metadata
@@ -1564,16 +1648,17 @@ async def test_set_blob_metadata_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            metadata = {'hello': 'world', 'number': '42'}
+            metadata = {"hello": "world", "number": "42"}
             await blob.set_blob_metadata(metadata, etag=etag, match_condition=MatchConditions.IfModified)

         # Assert
@@ -1586,14 +1671,15 @@ async def test_delete_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = await blob.delete_blob(if_modified_since=test_datetime)

         # Assert
@@ -1608,14 +1694,15 @@ async def test_delete_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             await blob.delete_blob(if_modified_since=test_datetime)

@@ -1631,14 +1718,15 @@ async def test_delete_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = await blob.delete_blob(if_unmodified_since=test_datetime)

         # Assert
@@ -1653,14 +1741,15 @@ async def test_delete_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             await blob.delete_blob(if_unmodified_since=test_datetime)

@@ -1675,11 +1764,12 @@ async def test_delete_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1695,15 +1785,16 @@ async def test_delete_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
-            await blob.delete_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            await blob.delete_blob(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1714,14 +1805,15 @@ async def test_delete_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        resp = await blob.delete_blob(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        resp = await blob.delete_blob(etag="0x111111111111111", match_condition=MatchConditions.IfModified)

         # Assert
         assert resp is None
@@ -1732,11 +1824,12 @@ async def test_delete_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1753,19 +1846,20 @@ async def test_snapshot_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = await blob.create_snapshot(if_modified_since=test_datetime)

         # Assert
         assert resp is not None
-        assert resp['snapshot'] is not None
+        assert resp["snapshot"] is not None

         return variables
@@ -1776,15 +1870,16 @@ async def test_snapshot_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.create_snapshot(if_modified_since=test_datetime)

         # Assert
@@ -1799,19 +1894,20 @@ async def test_snapshot_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         resp = await blob.create_snapshot(if_unmodified_since=test_datetime)

         # Assert
         assert resp is not None
-        assert resp['snapshot'] is not None
+        assert resp["snapshot"] is not None

         return variables
@@ -1822,15 +1918,16 @@ async def test_snapshot_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.create_snapshot(if_unmodified_since=test_datetime)

         # Assert
@@ -1844,11 +1941,12 @@ async def test_snapshot_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1856,7 +1954,7 @@ async def test_snapshot_blob_with_if_match(self, **kwargs):

         # Assert
         assert resp is not None
-        assert resp['snapshot'] is not None
+        assert resp["snapshot"] is not None

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1864,15 +1962,16 @@ async def test_snapshot_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
-            await blob.create_snapshot(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified)
+            blob = bsc.get_blob_client(self.container_name, "blob1")
+            await blob.create_snapshot(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -1883,18 +1982,19 @@ async def test_snapshot_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        resp = await blob.create_snapshot(etag='0x111111111111111', match_condition=MatchConditions.IfModified)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        resp = await blob.create_snapshot(etag="0x111111111111111", match_condition=MatchConditions.IfModified)

         # Assert
         assert resp is not None
-        assert resp['snapshot'] is not None
+        assert resp["snapshot"] is not None

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1902,11 +2002,12 @@ async def test_snapshot_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag

         # Act
@@ -1923,18 +2024,17 @@ async def test_lease_blob_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        lease = await blob.acquire_lease(
-            if_modified_since=test_datetime,
-            lease_id=test_lease_id)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        lease = await blob.acquire_lease(if_modified_since=test_datetime, lease_id=test_lease_id)

         await lease.break_lease()

@@ -1951,16 +2051,17 @@ async def test_lease_blob_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            blob = bsc.get_blob_client(self.container_name, 'blob1')
+            blob = bsc.get_blob_client(self.container_name, "blob1")
             await blob.acquire_lease(lease_id=test_lease_id, if_modified_since=test_datetime)

         # Assert
@@ -1975,18 +2076,17 @@ async def test_lease_blob_with_if_unmodified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
-        lease = await blob.acquire_lease(
-            if_unmodified_since=test_datetime,
-            lease_id=test_lease_id)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
+        lease = await blob.acquire_lease(if_unmodified_since=test_datetime, lease_id=test_lease_id)

         await lease.break_lease()

@@ -2003,15 +2103,16 @@ async def test_lease_blob_with_if_unmodified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             await blob.acquire_lease(lease_id=test_lease_id, if_unmodified_since=test_datetime)

@@ -2026,18 +2127,19 @@ async def test_lease_blob_with_if_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
+        test_lease_id = "00000000-1111-2222-3333-444444444444"

         # Act
         lease = await blob.acquire_lease(
-            lease_id=test_lease_id,
-            etag=etag, match_condition=MatchConditions.IfNotModified)
+            lease_id=test_lease_id, etag=etag, match_condition=MatchConditions.IfNotModified
+        )

         await lease.break_lease()

@@ -2053,19 +2155,19 @@ async def test_lease_blob_with_if_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         with pytest.raises(ResourceModifiedError) as e:
             await blob.acquire_lease(
-                lease_id=test_lease_id,
-                etag='0x111111111111111',
-                match_condition=MatchConditions.IfNotModified)
+                lease_id=test_lease_id, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified
+            )

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -2076,18 +2178,18 @@ async def test_lease_blob_with_if_none_match(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        test_lease_id = "00000000-1111-2222-3333-444444444444"

         # Act
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         lease = await blob.acquire_lease(
-            lease_id=test_lease_id,
-            etag='0x111111111111111',
-            match_condition=MatchConditions.IfModified)
+            lease_id=test_lease_id, etag="0x111111111111111", match_condition=MatchConditions.IfModified
+        )

         await lease.break_lease()

@@ -2101,20 +2203,18 @@ async def test_lease_blob_with_if_none_match_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'hello world', bsc)
-        blob = bsc.get_blob_client(self.container_name, 'blob1')
+        await self._create_container_and_block_blob(self.container_name, "blob1", b"hello world", bsc)
+        blob = bsc.get_blob_client(self.container_name, "blob1")
         etag = (await blob.get_blob_properties()).etag
-        test_lease_id = '00000000-1111-2222-3333-444444444444'
+        test_lease_id = "00000000-1111-2222-3333-444444444444"

         # Act
         with pytest.raises(ResourceModifiedError) as e:
-            await blob.acquire_lease(
-                lease_id=test_lease_id,
-                etag=etag,
-                match_condition=MatchConditions.IfModified)
+            await blob.acquire_lease(lease_id=test_lease_id, etag=etag, match_condition=MatchConditions.IfModified)

         # Assert
         assert StorageErrorCode.condition_not_met == e.value.error_code
@@ -2126,24 +2226,24 @@ async def test_put_block_list_with_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'', bsc)
-        await asyncio.gather(*[
-            blob.stage_block('1', b'AAA'),
-            blob.stage_block('2', b'BBB'),
-            blob.stage_block('3', b'CCC')])
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15))
+        container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc)
+        await asyncio.gather(
+            *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")]
+        )
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15))

         # Act
-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
         await blob.commit_block_list(block_list, if_modified_since=test_datetime)

         # Assert
         content = await blob.download_blob()
         content = await content.readall()
-        assert content == b'AAABBBCCC'
+        assert content == b"AAABBBCCC"

         return variables
@@ -2153,24 +2253,26 @@ async def test_put_block_list_returns_vid(self, **kwargs):
         versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
         versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")

-        bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(versioned_storage_account_name, "blob"),
+            versioned_storage_account_key,
+            connection_data_block_size=4 * 1024,
+        )
         self._setup()
-        container, blob = await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'', bsc)
-        await asyncio.gather(*[
-            blob.stage_block('1', b'AAA'),
-            blob.stage_block('2', b'BBB'),
-            blob.stage_block('3', b'CCC')])
+        container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc)
+        await asyncio.gather(
+            *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")]
+        )

         # Act
-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
         resp = await blob.commit_block_list(block_list)

         # Assert
-        assert resp['version_id'] is not None
+        assert resp["version_id"] is not None
         content = await blob.download_blob()
         content = await content.readall()
-        assert content == b'AAABBBCCC'
+        assert content == b"AAABBBCCC"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -2179,21 +2281,22 @@ async def test_put_block_list_with_if_modified_fail(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         variables = kwargs.pop("variables", {})

-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024
+        )
         self._setup()
-        container, blob = await self._create_container_and_block_blob(
-            self.container_name, 'blob1', b'', bsc)
-        await asyncio.gather(*[
-            blob.stage_block('1', b'AAA'),
-            blob.stage_block('2', b'BBB'),
-            blob.stage_block('3', b'CCC')])
-        test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15))
+        container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc)
+        await asyncio.gather(
+            *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")]
+        )
+        test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15))

         # Act
         with
pytest.raises(ResourceModifiedError) as e: await blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')], - if_modified_since=test_datetime) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + if_modified_since=test_datetime, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2207,24 +2310,24 @@ async def test_put_block_list_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - blob.stage_block('3', b'CCC')]) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] await blob.commit_block_list(block_list, if_unmodified_since=test_datetime) # Assert content = await blob.download_blob() content = await content.readall() - assert content == b'AAABBBCCC' + assert content == b"AAABBBCCC" return variables @@ -2235,21 +2338,22 @@ async def test_put_block_list_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - blob.stage_block('3', b'CCC')]) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: await blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')], - if_unmodified_since=test_datetime) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + if_unmodified_since=test_datetime, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2262,24 +2366,24 @@ async 
def test_put_block_list_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - blob.stage_block('3', b'CCC')]) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) etag = (await blob.get_blob_properties()).etag # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] await blob.commit_block_list(block_list, etag=etag, match_condition=MatchConditions.IfNotModified) # Assert content = await blob.download_blob() content = await content.readall() - assert content == b'AAABBBCCC' + assert content == b"AAABBBCCC" @BlobPreparer() @recorded_by_proxy_async @@ -2287,20 +2391,22 @@ async def test_put_block_list_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - blob.stage_block('3', b'CCC')]) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) # Act with pytest.raises(ResourceModifiedError) as e: await blob.commit_block_list( - [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')], - etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")], + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2311,23 +2417,23 @@ async def test_put_block_list_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - 
blob.stage_block('3', b'CCC')]) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - await blob.commit_block_list(block_list, etag='0x111111111111111', match_condition=MatchConditions.IfModified) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + await blob.commit_block_list(block_list, etag="0x111111111111111", match_condition=MatchConditions.IfModified) # Assert content = await blob.download_blob() content = await content.readall() - assert content == b'AAABBBCCC' + assert content == b"AAABBBCCC" @BlobPreparer() @recorded_by_proxy_async @@ -2335,19 +2441,19 @@ async def test_put_block_list_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_block_blob( - self.container_name, 'blob1', b'', bsc) - await asyncio.gather(*[ - blob.stage_block('1', b'AAA'), - blob.stage_block('2', b'BBB'), - blob.stage_block('3', b'CCC')]) + container, blob = await self._create_container_and_block_blob(self.container_name, "blob1", b"", bsc) + await asyncio.gather( + *[blob.stage_block("1", b"AAA"), blob.stage_block("2", b"BBB"), blob.stage_block("3", b"CCC")] + ) etag = (await blob.get_blob_properties()).etag # Act with pytest.raises(ResourceModifiedError) as e: - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] await blob.commit_block_list(block_list, etag=etag, match_condition=MatchConditions.IfModified) # Assert @@ -2360,15 +2466,16 @@ async def test_update_page_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") await blob.upload_page(data, offset=0, length=512, if_modified_since=test_datetime) return variables @@ -2380,15 +2487,16 @@ async def test_update_page_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - 
bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: await blob.upload_page(data, offset=0, length=512, if_modified_since=test_datetime) @@ -2404,15 +2512,16 @@ async def test_update_page_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") await blob.upload_page(data, offset=0, length=512, if_unmodified_since=test_datetime) return variables @@ -2424,15 +2533,16 @@ async def test_update_page_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: await blob.upload_page(data, offset=0, length=512, if_unmodified_since=test_datetime) @@ -2447,12 +2557,13 @@ async def test_update_page_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = 
kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 - blob = bsc.get_blob_client(self.container_name, 'blob1') + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 + blob = bsc.get_blob_client(self.container_name, "blob1") etag = (await blob.get_blob_properties()).etag # Act @@ -2466,16 +2577,19 @@ async def test_update_page_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') + blob = bsc.get_blob_client(self.container_name, "blob1") with pytest.raises(ResourceModifiedError) as e: - await blob.upload_page(data, offset=0, length=512, etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + await blob.upload_page( + data, offset=0, length=512, etag="0x111111111111111", match_condition=MatchConditions.IfNotModified + ) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2486,15 +2600,18 @@ async def test_update_page_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 # Act - blob = bsc.get_blob_client(self.container_name, 'blob1') - await blob.upload_page(data, offset=0, length=512, etag='0x111111111111111', match_condition=MatchConditions.IfModified) + blob = bsc.get_blob_client(self.container_name, "blob1") + await blob.upload_page( + data, offset=0, length=512, etag="0x111111111111111", match_condition=MatchConditions.IfModified + ) # Assert @@ -2504,12 +2621,13 @@ async def test_update_page_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) 
self._setup() - await self._create_container_and_page_blob( - self.container_name, 'blob1', 1024, bsc) - data = b'abcdefghijklmnop' * 32 - blob = bsc.get_blob_client(self.container_name, 'blob1') + await self._create_container_and_page_blob(self.container_name, "blob1", 1024, bsc) + data = b"abcdefghijklmnop" * 32 + blob = bsc.get_blob_client(self.container_name, "blob1") etag = (await blob.get_blob_properties()).etag # Act @@ -2526,21 +2644,24 @@ async def test_get_page_ranges_iter_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act ranges = await blob.get_page_ranges(if_modified_since=test_datetime) # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} return variables @@ -2551,13 +2672,16 @@ async def test_get_page_ranges_iter_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2575,21 +2699,24 @@ async def test_get_page_ranges_iter_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), 
storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act ranges = await blob.get_page_ranges(if_unmodified_since=test_datetime) # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} return variables @@ -2600,13 +2727,16 @@ async def test_get_page_ranges_iter_with_if_unmod_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2623,12 +2753,15 @@ async def test_get_page_ranges_iter_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), 
blob.upload_page(data, offset=1024, length=512) + ) etag = (await blob.get_blob_properties()).etag # Act @@ -2636,8 +2769,8 @@ async def test_get_page_ranges_iter_with_if_match(self, **kwargs): # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} @BlobPreparer() @recorded_by_proxy_async @@ -2645,16 +2778,19 @@ async def test_get_page_ranges_iter_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act with pytest.raises(ResourceModifiedError) as e: - await blob.get_page_ranges(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + await blob.get_page_ranges(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2665,20 +2801,23 @@ async def test_get_page_ranges_iter_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) # Act - ranges = await blob.get_page_ranges(etag='0x111111111111111', match_condition=MatchConditions.IfModified) + ranges = await blob.get_page_ranges(etag="0x111111111111111", match_condition=MatchConditions.IfModified) # Assert assert len(ranges[0]) == 2 - assert ranges[0][0] == {'start': 0, 'end': 511} - assert ranges[0][1] == {'start': 1024, 'end': 1535} + assert ranges[0][0] == {"start": 0, "end": 511} + assert ranges[0][1] == {"start": 1024, "end": 1535} @BlobPreparer() @recorded_by_proxy_async @@ -2686,13 +2825,16 @@ async def test_get_page_ranges_iter_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = 
kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_page_blob( - self.container_name, 'blob1', 2048, bsc) - data = b'abcdefghijklmnop' * 32 + container, blob = await self._create_container_and_page_blob(self.container_name, "blob1", 2048, bsc) + data = b"abcdefghijklmnop" * 32 - await asyncio.gather(blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512)) + await asyncio.gather( + blob.upload_page(data, offset=0, length=512), blob.upload_page(data, offset=1024, length=512) + ) etag = (await blob.get_blob_properties()).etag # Act @@ -2709,19 +2851,21 @@ async def test_append_block_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), if_modified_since=test_datetime) + resp = await blob.append_block("block {0}".format(i), if_modified_since=test_datetime) assert resp is not None # Assert content = await blob.download_blob() content = await content.readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content return variables @@ -2732,14 +2876,16 @@ async def test_append_block_with_if_modified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), if_modified_since=test_datetime) + resp = await blob.append_block("block {0}".format(i), if_modified_since=test_datetime) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2753,19 +2899,21 @@ async def test_append_block_with_if_unmodified(self, **kwargs): 
storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), if_unmodified_since=test_datetime) + resp = await blob.append_block("block {0}".format(i), if_unmodified_since=test_datetime) assert resp is not None # Assert content = await blob.download_blob() content = await content.readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content return variables @@ -2776,14 +2924,16 @@ async def test_append_block_with_if_unmodified_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), if_unmodified_since=test_datetime) + resp = await blob.append_block("block {0}".format(i), if_unmodified_since=test_datetime) # Assert assert StorageErrorCode.condition_not_met == e.value.error_code @@ -2796,20 +2946,24 @@ async def test_append_block_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act for i in range(5): etag = (await blob.get_blob_properties()).etag - resp = await blob.append_block(u'block {0}'.format(i), etag=etag, match_condition=MatchConditions.IfNotModified) + resp = await blob.append_block( + "block {0}".format(i), etag=etag, match_condition=MatchConditions.IfNotModified + ) assert resp is not None # Assert content = await blob.download_blob() content = 
await content.readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content @BlobPreparer() @recorded_by_proxy_async @@ -2817,14 +2971,18 @@ async def test_append_block_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act with pytest.raises(HttpResponseError) as e: for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + resp = await blob.append_block( + "block {0}".format(i), etag="0x111111111111111", match_condition=MatchConditions.IfNotModified + ) @BlobPreparer() @recorded_by_proxy_async @@ -2832,19 +2990,23 @@ async def test_append_block_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act for i in range(5): - resp = await blob.append_block(u'block {0}'.format(i), etag='0x8D2C9167D53FC2C', match_condition=MatchConditions.IfModified) + resp = await blob.append_block( + "block {0}".format(i), etag="0x8D2C9167D53FC2C", match_condition=MatchConditions.IfModified + ) assert resp is not None # Assert content = await blob.download_blob() content = await content.readall() - assert b'block 0block 1block 2block 3block 4' == content + assert b"block 0block 1block 2block 3block 4" == content @BlobPreparer() @recorded_by_proxy_async @@ -2852,15 +3014,19 @@ async def test_append_block_with_if_none_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() - container, blob = await self._create_container_and_append_blob(self.container_name, 'blob1', bsc) + container, blob = await self._create_container_and_append_blob(self.container_name, "blob1", bsc) # Act with pytest.raises(ResourceModifiedError) as e: for i in range(5): etag = (await blob.get_blob_properties()).etag - resp = await blob.append_block(u'block {0}'.format(i), etag=etag, match_condition=MatchConditions.IfModified) + resp = await blob.append_block( + "block {0}".format(i), etag=etag, match_condition=MatchConditions.IfModified + ) # Assert assert 
StorageErrorCode.condition_not_met == e.value.error_code @@ -2872,11 +3038,13 @@ async def test_append_blob_from_bytes_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) @@ -2896,11 +3064,13 @@ async def test_apnd_blob_from_bytes_with_if_mod_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2918,11 +3088,13 @@ async def test_append_blob_from_bytes_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() + timedelta(minutes=15)) + test_datetime = self.get_datetime_variable(variables, "if_modified", datetime.utcnow() + timedelta(minutes=15)) # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) @@ -2942,11 +3114,13 @@ async def test_append_blob_from_bytes_with_if_unmod_fail(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_datetime = self.get_datetime_variable(variables, 'if_modified', datetime.utcnow() - timedelta(minutes=15)) + test_datetime = 
self.get_datetime_variable(variables, "if_modified", datetime.utcnow() - timedelta(minutes=15)) # Act with pytest.raises(ResourceModifiedError) as e: @@ -2963,7 +3137,9 @@ async def test_append_blob_from_bytes_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) @@ -2971,7 +3147,9 @@ async def test_append_blob_from_bytes_with_if_match(self, **kwargs): # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - await blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified) + await blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified + ) # Assert content = await blob.download_blob() @@ -2984,16 +3162,20 @@ async def test_append_blob_from_bytes_with_if_match_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_etag = '0x8D2C9167D53FC2C' + test_etag = "0x8D2C9167D53FC2C" # Act with pytest.raises(ResourceModifiedError) as e: data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - await blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified) + await blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfNotModified + ) assert StorageErrorCode.condition_not_met == e.value.error_code @@ -3003,15 +3185,19 @@ async def test_append_blob_from_bytes_with_if_none_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) - test_etag = '0x8D2C9167D53FC2C' + test_etag = "0x8D2C9167D53FC2C" # Act data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - await blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified) + await blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified + ) # Assert content = await blob.download_blob() @@ -3024,7 +3210,9 @@ async def test_apnd_blob_from_bytes_if_none_match_fail(self, **kwargs): storage_account_name = 
kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, connection_data_block_size=4 * 1024 + ) self._setup() blob_name = self.get_resource_name("blob") container, blob = await self._create_container_and_append_blob(self.container_name, blob_name, bsc) @@ -3033,7 +3221,9 @@ async def test_apnd_blob_from_bytes_if_none_match_fail(self, **kwargs): # Act with pytest.raises(ResourceModifiedError) as e: data = self.get_random_bytes(LARGE_APPEND_BLOB_SIZE) - await blob.upload_blob(data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified) + await blob.upload_blob( + data, blob_type=BlobType.AppendBlob, etag=test_etag, match_condition=MatchConditions.IfModified + ) assert StorageErrorCode.condition_not_met == e.value.error_code @@ -3044,13 +3234,13 @@ async def test_header_metadata_sort_in_upload_blob_fails(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = await bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Relevant ASCII characters (excluding 'Bad Request' values) ascii_subset = "!#$%&*+.-^_~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz|~" @@ -3058,7 +3248,7 @@ async def test_header_metadata_sort_in_upload_blob_fails(self, **kwargs): # Build out metadata metadata = dict() for c in ascii_subset: - metadata[c] = 'a' + metadata[c] = "a" # Act # If we hit invalid metadata error, that means we have successfully sorted headers properly to pass auth error @@ -3075,19 +3265,44 @@ async def test_header_metadata_sort_in_upload_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = await bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Hand-picked metadata examples as Python & service don't sort '_' with the same weight - metadata = {'a0': 'a', 'a1': 'a', 'a2': 'a', 'a3': 'a', 'a4': 'a', 'a5': 'a', 'a6': 'a', 'a7': 'a', 'a8': 'a', - 'a9': 'a', '_': 'a', '_a': 'a', 'a_': 'a', '__': 'a', '_a_': 'a', 'b': 'a', 'c': 'a', 'y': 'a', - 'z': 'z_', '_z': 'a', '_F': 'a', 'F': 'a', 'F_': 'a', '_F_': 'a', '__F': 'a', '__a': 'a', 'a__': 'a' - } + metadata = { + "a0": "a", + "a1": "a", + "a2": "a", + "a3": "a", + "a4": "a", + "a5": "a", + "a6": "a", + "a7": "a", + "a8": "a", + "a9": "a", + "_": "a", + "_a": "a", + "a_": "a", + "__": "a", + "_a_": "a", + "b": "a", + "c": "a", + "y": "a", + "z": "z_", + "_z": "a", + "_F": "a", + "F": "a", + "F_": "a", + "_F_": "a", + "__F": "a", + "__a": "a", + "a__": "a", + } # Act await blob_client.upload_blob(data, length=len(data), metadata=metadata) @@ -3099,19 +3314,44 @@ async def 
test_header_metadata_sort_in_upload_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = await bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Hand-picked metadata examples as Python & service don't sort '_' with the same weight - metadata = {'a0': 'a', 'a1': 'a', 'a2': 'a', 'a3': 'a', 'a4': 'a', 'a5': 'a', 'a6': 'a', 'a7': 'a', 'a8': 'a', - 'a9': 'a', '_': 'a', '_a': 'a', 'a_': 'a', '__': 'a', '_a_': 'a', 'b': 'a', 'c': 'a', 'y': 'a', - 'z': 'z_', '_z': 'a', '_F': 'a', 'F': 'a', 'F_': 'a', '_F_': 'a', '__F': 'a', '__a': 'a', 'a__': 'a' - } + metadata = { + "a0": "a", + "a1": "a", + "a2": "a", + "a3": "a", + "a4": "a", + "a5": "a", + "a6": "a", + "a7": "a", + "a8": "a", + "a9": "a", + "_": "a", + "_a": "a", + "a_": "a", + "__": "a", + "_a_": "a", + "b": "a", + "c": "a", + "y": "a", + "z": "z_", + "_z": "a", + "_F": "a", + "F": "a", + "F_": "a", + "_F_": "a", + "__F": "a", + "__a": "a", + "a__": "a", + } # Act await blob_client.upload_blob(data, length=len(data), metadata=metadata) @@ -3123,30 +3363,30 @@ async def test_header_metadata_sort_in_upload_blob_translation(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup() - data = b'hello world' + data = b"hello world" bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) try: container_client = await bsc.create_container(self.container_name) except: container_client = bsc.get_container_client(self.container_name) - blob_client = container_client.get_blob_client('blob1') + blob_client = container_client.get_blob_client("blob1") # Hand-picked metadata examples that sorted incorrectly with our previous implementation. 
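For reference, the access-conditions tests reflowed above all drive the same small set of conditional-request kwargs on the public azure.storage.blob.aio client: if_modified_since / if_unmodified_since (HTTP date preconditions) and etag paired with match_condition (ETag preconditions). The sketch below is illustrative only and is not part of this diff; the endpoint, key, container, and blob names are placeholders, and the tests' connection_data_block_size transport knob is omitted.

import asyncio
from datetime import datetime, timedelta

from azure.core import MatchConditions
from azure.storage.blob.aio import BlobServiceClient


async def main():
    # Placeholder endpoint and credential -- substitute real values.
    bsc = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")
    blob = bsc.get_blob_client("my-container", "blob1")

    # Date precondition: acquire a lease only if the blob changed after the
    # given timestamp (the pattern test_lease_blob_with_if_modified exercises).
    lease = await blob.acquire_lease(
        lease_id="00000000-1111-2222-3333-444444444444",
        if_modified_since=datetime.utcnow() - timedelta(minutes=15),
    )
    await lease.break_lease()

    # ETag precondition: overwrite only if no concurrent writer changed the
    # blob since we read its properties (optimistic concurrency).
    etag = (await blob.get_blob_properties()).etag
    await blob.upload_blob(b"new data", overwrite=True, etag=etag, match_condition=MatchConditions.IfNotModified)

    await bsc.close()


asyncio.run(main())

When a precondition is not met, the call raises ResourceModifiedError (a subclass of HttpResponseError) whose error_code equals StorageErrorCode.condition_not_met, which is exactly what the *_fail tests above assert.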
metadata = { - 'test': 'val', - 'test-': 'val', - 'test--': 'val', - 'test-_': 'val', - 'test_-': 'val', - 'test__': 'val', - 'test-a': 'val', - 'test-A': 'val', - 'test-_A': 'val', - 'test_a': 'val', - 'test_Z': 'val', - 'test_a_': 'val', - 'test_a-': 'val', - 'test_a-_': 'val', + "test": "val", + "test-": "val", + "test--": "val", + "test-_": "val", + "test_-": "val", + "test__": "val", + "test-a": "val", + "test-A": "val", + "test-_A": "val", + "test_a": "val", + "test_Z": "val", + "test_a_": "val", + "test_a-": "val", + "test_a-_": "val", } # Act @@ -3157,4 +3397,5 @@ async def test_header_metadata_sort_in_upload_blob_translation(self, **kwargs): # Assert assert StorageErrorCode.invalid_metadata == e.value.error_code + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_api_version.py b/sdk/storage/azure-storage-blob/tests/test_blob_api_version.py index 6ed00bc5a1dd..60a96d9cabc4 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_api_version.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_api_version.py @@ -17,7 +17,7 @@ from devtools_testutils.storage import StorageRecordedTestCase from settings.testcase import BlobPreparer -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" class TestStorageBlobApiVersion(StorageRecordedTestCase): @@ -26,7 +26,7 @@ class TestStorageBlobApiVersion(StorageRecordedTestCase): def _setup(self): self.api_version_1 = "2019-02-02" self.api_version_2 = X_MS_VERSION - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") def _get_blob_reference(self, prefix=TEST_BLOB_PREFIX): return self.get_resource_name(prefix) @@ -43,9 +43,7 @@ def _create_container(self, bsc): def test_service_client_api_version_property(self): self._setup() - service_client = BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key") + service_client = BlobServiceClient("https://foo.blob.core.windows.net/account", credential="fake_key") assert service_client.api_version == self.api_version_2 assert service_client._client._config.version == self.api_version_2 @@ -53,9 +51,8 @@ def test_service_client_api_version_property(self): service_client.api_version = "foo" service_client = BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key", - api_version=self.api_version_1) + "https://foo.blob.core.windows.net/account", credential="fake_key", api_version=self.api_version_1 + ) assert service_client.api_version == self.api_version_1 assert service_client._client._config.version == self.api_version_1 @@ -70,9 +67,8 @@ def test_service_client_api_version_property(self): def test_container_client_api_version_property(self): self._setup() container_client = ContainerClient( - "https://foo.blob.core.windows.net/account", - self.container_name, - credential="fake_key") + "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key" + ) assert container_client.api_version == self.api_version_2 assert container_client._client._config.version == self.api_version_2 @@ -80,7 +76,8 @@ def test_container_client_api_version_property(self): "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key", - api_version=self.api_version_1) + api_version=self.api_version_1, + ) assert container_client.api_version == self.api_version_1 assert container_client._client._config.version == self.api_version_1 @@ -95,7 +92,8 @@ def 
test_blob_client_api_version_property(self): self.container_name, self._get_blob_reference(), credential="fake_key", - api_version=self.api_version_1) + api_version=self.api_version_1, + ) assert blob_client.api_version == self.api_version_1 assert blob_client._client._config.version == self.api_version_1 @@ -103,17 +101,15 @@ def test_blob_client_api_version_property(self): "https://foo.blob.core.windows.net/account", self.container_name, self._get_blob_reference(), - credential="fake_key") + credential="fake_key", + ) assert blob_client.api_version == self.api_version_2 assert blob_client._client._config.version == self.api_version_2 def test_invalid_api_version(self): self._setup() with pytest.raises(ValueError) as error: - BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key", - api_version="foo") + BlobServiceClient("https://foo.blob.core.windows.net/account", credential="fake_key", api_version="foo") assert str(error.value).startswith("Unsupported API version 'foo'.") with pytest.raises(ValueError) as error: @@ -121,7 +117,8 @@ def test_invalid_api_version(self): "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key", - api_version="foo") + api_version="foo", + ) assert str(error.value).startswith("Unsupported API version 'foo'.") with pytest.raises(ValueError) as error: @@ -130,7 +127,8 @@ def test_invalid_api_version(self): self.container_name, self._get_blob_reference(), credential="fake_key", - api_version="foo") + api_version="foo", + ) assert str(error.value).startswith("Unsupported API version 'foo'.") @BlobPreparer() @@ -145,7 +143,8 @@ def test_old_api_get_page_ranges_succeeds(self, **kwargs): credential=storage_account_key, connection_data_block_size=4 * 1024, max_page_size=4 * 1024, - api_version=self.api_version_1) + api_version=self.api_version_1, + ) container = self._create_container(bsc) blob_name = self._get_blob_reference() @@ -160,7 +159,7 @@ def test_old_api_get_page_ranges_succeeds(self, **kwargs): # Act ranges1, cleared1 = blob.get_page_ranges(previous_snapshot_diff=snapshot1) - ranges2, cleared2 = blob.get_page_ranges(previous_snapshot_diff=snapshot2['snapshot']) + ranges2, cleared2 = blob.get_page_ranges(previous_snapshot_diff=snapshot2["snapshot"]) # Assert assert ranges1 is not None @@ -168,19 +167,20 @@ def test_old_api_get_page_ranges_succeeds(self, **kwargs): assert len(ranges1) == 2 assert isinstance(cleared1, list) assert len(cleared1) == 1 - assert ranges1[0]['start'] == 0 - assert ranges1[0]['end'] == 511 - assert cleared1[0]['start'] == 512 - assert cleared1[0]['end'] == 1023 - assert ranges1[1]['start'] == 1024 - assert ranges1[1]['end'] == 1535 + assert ranges1[0]["start"] == 0 + assert ranges1[0]["end"] == 511 + assert cleared1[0]["start"] == 512 + assert cleared1[0]["end"] == 1023 + assert ranges1[1]["start"] == 1024 + assert ranges1[1]["end"] == 1535 assert ranges2 is not None assert isinstance(ranges2, list) assert len(ranges2) == 0 assert isinstance(cleared2, list) assert len(cleared2) == 1 - assert cleared2[0]['start'] == 512 - assert cleared2[0]['end'] == 1023 + assert cleared2[0]["start"] == 512 + assert cleared2[0]["end"] == 1023 + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_api_version_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_api_version_async.py index 9f40d21c7afc..b297dc39355e 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_api_version_async.py 
+++ b/sdk/storage/azure-storage-blob/tests/test_blob_api_version_async.py @@ -17,7 +17,7 @@ from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase from settings.testcase import BlobPreparer -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" class TestStorageBlobApiVersionAsync(AsyncStorageRecordedTestCase): @@ -26,7 +26,7 @@ class TestStorageBlobApiVersionAsync(AsyncStorageRecordedTestCase): def _setup(self): self.api_version_1 = "2019-02-02" self.api_version_2 = X_MS_VERSION - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") def _get_blob_reference(self, prefix=TEST_BLOB_PREFIX): return self.get_resource_name(prefix) @@ -43,9 +43,7 @@ async def _create_container(self, bsc): def test_service_client_api_version_property(self): self._setup() - service_client = BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key") + service_client = BlobServiceClient("https://foo.blob.core.windows.net/account", credential="fake_key") assert service_client.api_version == self.api_version_2 assert service_client._client._config.version == self.api_version_2 @@ -53,9 +51,8 @@ def test_service_client_api_version_property(self): service_client.api_version = "foo" service_client = BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key", - api_version=self.api_version_1) + "https://foo.blob.core.windows.net/account", credential="fake_key", api_version=self.api_version_1 + ) assert service_client.api_version == self.api_version_1 assert service_client._client._config.version == self.api_version_1 @@ -70,9 +67,8 @@ def test_service_client_api_version_property(self): def test_container_client_api_version_property(self): self._setup() container_client = ContainerClient( - "https://foo.blob.core.windows.net/account", - self.container_name, - credential="fake_key") + "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key" + ) assert container_client.api_version == self.api_version_2 assert container_client._client._config.version == self.api_version_2 @@ -80,7 +76,8 @@ def test_container_client_api_version_property(self): "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key", - api_version=self.api_version_1) + api_version=self.api_version_1, + ) assert container_client.api_version == self.api_version_1 assert container_client._client._config.version == self.api_version_1 @@ -95,7 +92,8 @@ def test_blob_client_api_version_property(self): self.container_name, self._get_blob_reference(), credential="fake_key", - api_version=self.api_version_1) + api_version=self.api_version_1, + ) assert blob_client.api_version == self.api_version_1 assert blob_client._client._config.version == self.api_version_1 @@ -103,17 +101,15 @@ def test_blob_client_api_version_property(self): "https://foo.blob.core.windows.net/account", self.container_name, self._get_blob_reference(), - credential="fake_key") + credential="fake_key", + ) assert blob_client.api_version == self.api_version_2 assert blob_client._client._config.version == self.api_version_2 def test_invalid_api_version(self): self._setup() with pytest.raises(ValueError) as error: - BlobServiceClient( - "https://foo.blob.core.windows.net/account", - credential="fake_key", - api_version="foo") + BlobServiceClient("https://foo.blob.core.windows.net/account", credential="fake_key", api_version="foo") assert str(error.value).startswith("Unsupported API version 
'foo'.") with pytest.raises(ValueError) as error: @@ -121,7 +117,8 @@ def test_invalid_api_version(self): "https://foo.blob.core.windows.net/account", self.container_name, credential="fake_key", - api_version="foo") + api_version="foo", + ) assert str(error.value).startswith("Unsupported API version 'foo'.") with pytest.raises(ValueError) as error: @@ -130,7 +127,8 @@ def test_invalid_api_version(self): self.container_name, self._get_blob_reference(), credential="fake_key", - api_version="foo") + api_version="foo", + ) assert str(error.value).startswith("Unsupported API version 'foo'.") @BlobPreparer() @@ -145,7 +143,8 @@ async def test_old_api_get_page_ranges_succeeds(self, **kwargs): credential=storage_account_key, connection_data_block_size=4 * 1024, max_page_size=4 * 1024, - api_version=self.api_version_1) + api_version=self.api_version_1, + ) container = await self._create_container(bsc) blob_name = self._get_blob_reference() @@ -160,7 +159,7 @@ async def test_old_api_get_page_ranges_succeeds(self, **kwargs): # Act ranges1, cleared1 = await blob.get_page_ranges(previous_snapshot_diff=snapshot1) - ranges2, cleared2 = await blob.get_page_ranges(previous_snapshot_diff=snapshot2['snapshot']) + ranges2, cleared2 = await blob.get_page_ranges(previous_snapshot_diff=snapshot2["snapshot"]) # Assert assert ranges1 is not None @@ -168,19 +167,20 @@ async def test_old_api_get_page_ranges_succeeds(self, **kwargs): assert len(ranges1) == 2 assert isinstance(cleared1, list) assert len(cleared1) == 1 - assert ranges1[0]['start'] == 0 - assert ranges1[0]['end'] == 511 - assert cleared1[0]['start'] == 512 - assert cleared1[0]['end'] == 1023 - assert ranges1[1]['start'] == 1024 - assert ranges1[1]['end'] == 1535 + assert ranges1[0]["start"] == 0 + assert ranges1[0]["end"] == 511 + assert cleared1[0]["start"] == 512 + assert cleared1[0]["end"] == 1023 + assert ranges1[1]["start"] == 1024 + assert ranges1[1]["end"] == 1535 assert ranges2 is not None assert isinstance(ranges2, list) assert len(ranges2) == 0 assert isinstance(cleared2, list) assert len(cleared2) == 1 - assert cleared2[0]['start'] == 512 - assert cleared2[0]['end'] == 1023 + assert cleared2[0]["start"] == 512 + assert cleared2[0]["end"] == 1023 + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_client.py b/sdk/storage/azure-storage-blob/tests/test_blob_client.py index 1d6361a8ef28..05ae3bed33a1 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_client.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_client.py @@ -25,12 +25,12 @@ from settings.testcase import BlobPreparer SERVICES = { - BlobServiceClient: 'blob', - ContainerClient: 'blob', - BlobClient: 'blob', + BlobServiceClient: "blob", + ContainerClient: "blob", + BlobClient: "blob", } -_CONNECTION_ENDPOINTS = {'blob': 'BlobEndpoint'} -_CONNECTION_ENDPOINTS_SECONDARY = {'blob': 'BlobSecondaryEndpoint'} +_CONNECTION_ENDPOINTS = {"blob": "BlobEndpoint"} +_CONNECTION_ENDPOINTS_SECONDARY = {"blob": "BlobSecondaryEndpoint"} class TestStorageClient(StorageRecordedTestCase): @@ -41,8 +41,8 @@ def validate_standard_account_endpoints(self, service, url_type, name, storage_a assert service.account_name == name assert service.credential.account_name == name assert service.credential.account_key == storage_account_key - assert '{}.{}.core.windows.net'.format(name, url_type) in service.url - assert '{}-secondary.{}.core.windows.net'.format(name, url_type) in service.secondary_endpoint + assert 
"{}.{}.core.windows.net".format(name, url_type) in service.url + assert "{}-secondary.{}.core.windows.net".format(name, url_type) in service.secondary_endpoint def generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -66,11 +66,15 @@ def test_create_service_with_key(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) # Assert self.validate_standard_account_endpoints(service, url, storage_account_name, storage_account_key) - assert service.scheme == 'https' + assert service.scheme == "https" @BlobPreparer() def test_create_blob_client_with_complete_blob_url(self, **kwargs): @@ -79,12 +83,12 @@ def test_create_blob_client_with_complete_blob_url(self, **kwargs): # Arrange blob_url = self.account_url(storage_account_name, "blob") + "/foourl/barurl" - service = BlobClient(blob_url, credential=storage_account_key, container_name='foo', blob_name='bar') + service = BlobClient(blob_url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert - assert service.scheme == 'https' - assert service.container_name == 'foo' - assert service.blob_name == 'bar' + assert service.scheme == "https" + assert service.container_name == "foo" + assert service.blob_name == "bar" assert service.account_name == storage_account_name @BlobPreparer() @@ -95,11 +99,16 @@ def test_create_service_with_connection_string(self, **kwargs): for service_type in SERVICES.items(): # Act service = service_type[0].from_connection_string( - self.connection_string(storage_account_name, storage_account_key), container_name="test", blob_name="test") + self.connection_string(storage_account_name, storage_account_key), + container_name="test", + blob_name="test", + ) # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'https' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + assert service.scheme == "https" @BlobPreparer() def test_create_service_with_sas(self, **kwargs): @@ -110,12 +119,16 @@ def test_create_service_with_sas(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=sas_token, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=sas_token, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.url.endswith(sas_token) assert service.credential is None @@ -130,12 +143,16 @@ def test_create_service_with_sas_credential(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=sas_credential, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=sas_credential, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name - assert 
service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert not service.url.endswith(sas_token) assert service.credential == sas_credential @@ -151,7 +168,11 @@ def test_create_service_with_sas_credential_url_raises_if_sas_is_in_uri(self, ** # Act with pytest.raises(ValueError): service = service_type( - self.account_url(storage_account_name, "blob") + "?sig=foo", credential=sas_credential, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob") + "?sig=foo", + credential=sas_credential, + container_name="foo", + blob_name="bar", + ) @BlobPreparer() def test_create_service_with_token(self, **kwargs): @@ -161,11 +182,15 @@ def test_create_service_with_token(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=token_credential, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=token_credential, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.credential == token_credential assert service.account_name == storage_account_name @@ -177,8 +202,8 @@ def test_create_service_with_token_and_http(self, **kwargs): for service_type in SERVICES: # Act with pytest.raises(ValueError): - url = self.account_url(storage_account_name, "blob").replace('https', 'http') - service_type(url, credential=token_credential, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("https", "http") + service_type(url, credential=token_credential, container_name="foo", blob_name="bar") @BlobPreparer() def test_create_service_china(self, **kwargs): @@ -188,9 +213,8 @@ def test_create_service_china(self, **kwargs): # Arrange for service_type in SERVICES.items(): # Act - url = self.account_url(storage_account_name, "blob").replace('core.windows.net', 'core.chinacloudapi.cn') - service = service_type[0]( - url, credential=storage_account_key, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("core.windows.net", "core.chinacloudapi.cn") + service = service_type[0](url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert assert service is not None @@ -198,9 +222,11 @@ def test_create_service_china(self, **kwargs): assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key assert service.primary_endpoint.startswith( - 'https://{}.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) + "https://{}.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) assert service.secondary_endpoint.startswith( - 'https://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) + "https://{}-secondary.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) @BlobPreparer() def test_create_service_protocol(self, **kwargs): @@ -210,13 +236,14 @@ def test_create_service_protocol(self, **kwargs): # Arrange for service_type in SERVICES.items(): # Act - url = self.account_url(storage_account_name, "blob").replace('https', 'http') - service = 
service_type[0]( - url, credential=storage_account_key, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("https", "http") + service = service_type[0](url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'http' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + assert service.scheme == "http" @BlobPreparer() def test_create_blob_service_anonymous(self, **kwargs): @@ -227,12 +254,14 @@ def test_create_blob_service_anonymous(self, **kwargs): for service_type in BLOB_SERVICES: # Act - service = service_type(self.account_url(storage_account_name, "blob"), container_name='foo', blob_name='bar') + service = service_type( + self.account_url(storage_account_name, "blob"), container_name="foo", blob_name="bar" + ) # Assert assert service is not None assert service.account_name == storage_account_name - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.credential is None @BlobPreparer() @@ -246,18 +275,21 @@ def test_create_blob_service_custom_domain(self, **kwargs): for service_type in BLOB_SERVICES: # Act service = service_type( - 'www.mydomain.com', - credential={'account_name': storage_account_name, 'account_key': storage_account_key}, - container_name='foo', - blob_name='bar') + "www.mydomain.com", + credential={"account_name": storage_account_name, "account_key": storage_account_key}, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + ) @BlobPreparer() def test_create_service_with_socket_timeout(self, **kwargs): @@ -269,14 +301,23 @@ def test_create_service_with_socket_timeout(self, **kwargs): for service_type in SERVICES.items(): # Act default_service = service_type[0]( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, - container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) service = service_type[0]( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, - container_name='foo', blob_name='bar', connection_timeout=22) + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + connection_timeout=22, + ) # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert 
default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] @@ -288,16 +329,17 @@ def test_create_service_with_connection_string_key(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - conn_string = 'AccountName={};AccountKey={};'.format(storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};".format(storage_account_name, storage_account_key) for service_type in SERVICES.items(): # Act - service = service_type[0].from_connection_string( - conn_string, container_name='foo', blob_name='bar') + service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'https' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + assert service.scheme == "https" @BlobPreparer() def test_create_service_with_connection_string_sas(self, **kwargs): @@ -305,16 +347,15 @@ def test_create_service_with_connection_string_sas(self, **kwargs): # Arrange sas_token = self.generate_fake_sas_token() - conn_string = 'AccountName={};SharedAccessSignature={};'.format(storage_account_name, sas_token) + conn_string = "AccountName={};SharedAccessSignature={};".format(storage_account_name, sas_token) for service_type in SERVICES: # Act - service = service_type.from_connection_string( - conn_string, container_name='foo', blob_name='bar') + service = service_type.from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert assert service is not None - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.url.endswith(sas_token) assert service.credential is None assert service.account_name == storage_account_name @@ -325,8 +366,11 @@ def test_create_service_with_connection_string_endpoint_protocol(self, **kwargs) storage_account_key = kwargs.pop("storage_account_key") # Arrange - conn_string = 'AccountName={};AccountKey={};DefaultEndpointsProtocol=http;EndpointSuffix=core.chinacloudapi.cn;'.format( - storage_account_name, storage_account_key) + conn_string = ( + "AccountName={};AccountKey={};DefaultEndpointsProtocol=http;EndpointSuffix=core.chinacloudapi.cn;".format( + storage_account_name, storage_account_key + ) + ) for service_type in SERVICES.items(): # Act @@ -338,10 +382,12 @@ def test_create_service_with_connection_string_endpoint_protocol(self, **kwargs) assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key assert service.primary_endpoint.startswith( - 'http://{}.{}.core.chinacloudapi.cn/'.format(storage_account_name, service_type[1])) + "http://{}.{}.core.chinacloudapi.cn/".format(storage_account_name, service_type[1]) + ) assert service.secondary_endpoint.startswith( - 'http://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) - assert service.scheme == 'http' + "http://{}-secondary.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) + assert service.scheme == "http" @BlobPreparer() def test_create_service_with_connection_string_emulated(self, **kwargs): @@ -350,7 +396,7 @@ def test_create_service_with_connection_string_emulated(self, **kwargs): # Arrange for service_type in 
SERVICES.items(): - conn_string = 'UseDevelopmentStorage=true;'.format(storage_account_name, storage_account_key) + conn_string = "UseDevelopmentStorage=true;".format(storage_account_name, storage_account_key) # Act with pytest.raises(ValueError): @@ -360,7 +406,7 @@ def test_create_service_with_connection_string_emulated(self, **kwargs): def test_create_service_with_cstr_anonymous(self): # Arrange for service_type in SERVICES.items(): - conn_string = 'BlobEndpoint=www.mydomain.com;' + conn_string = "BlobEndpoint=www.mydomain.com;" # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -369,7 +415,7 @@ def test_create_service_with_cstr_anonymous(self): assert service is not None assert service.account_name == None assert service.credential is None - assert service.primary_endpoint.startswith('https://www.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") with pytest.raises(ValueError): service.secondary_endpoint @@ -380,8 +426,9 @@ def test_create_service_with_cstr_custom_domain(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -391,8 +438,10 @@ def test_create_service_with_cstr_custom_domain(self, **kwargs): assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + ) @BlobPreparer() def test_create_service_with_cstr_cust_dmn_trailing_slash(self, **kwargs): @@ -401,8 +450,9 @@ def test_create_service_with_cstr_cust_dmn_trailing_slash(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -412,8 +462,10 @@ def test_create_service_with_cstr_cust_dmn_trailing_slash(self, **kwargs): assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + ) @BlobPreparer() def test_create_service_with_cstr_custom_domain_sec_override(self, **kwargs): @@ 
-422,20 +474,22 @@ def test_create_service_with_cstr_custom_domain_sec_override(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string( - conn_string, secondary_hostname="www-sec.mydomain.com", container_name="foo", blob_name="bar") + conn_string, secondary_hostname="www-sec.mydomain.com", container_name="foo", blob_name="bar" + ) # Assert assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://www-sec.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith("https://www-sec.mydomain.com/") @BlobPreparer() def test_create_service_with_cstr_fails_if_sec_without_prim(self, **kwargs): @@ -444,9 +498,9 @@ def test_create_service_with_cstr_fails_if_sec_without_prim(self, **kwargs): for service_type in SERVICES.items(): # Arrange - conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;'.format( - storage_account_name, storage_account_key, - _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1])) + conn_string = "AccountName={};AccountKey={};{}=www.mydomain.com;".format( + storage_account_name, storage_account_key, _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]) + ) # Act @@ -461,11 +515,12 @@ def test_create_service_with_cstr_succeeds_if_sec_with_prim(self, **kwargs): for service_type in SERVICES.items(): # Arrange - conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;{}=www-sec.mydomain.com;'.format( + conn_string = "AccountName={};AccountKey={};{}=www.mydomain.com;{}=www-sec.mydomain.com;".format( storage_account_name, storage_account_key, _CONNECTION_ENDPOINTS.get(service_type[1]), - _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1])) + _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]), + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -475,8 +530,8 @@ def test_create_service_with_cstr_succeeds_if_sec_with_prim(self, **kwargs): assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://www-sec.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith("https://www-sec.mydomain.com/") def test_create_service_with_custom_account_endpoint_path(self): account_name = "blobstorage" @@ -484,45 +539,45 @@ def test_create_service_with_custom_account_endpoint_path(self): sas_token = self.generate_fake_sas_token() custom_account_url = "http://local-machine:11002/custom/account/path/" + sas_token for service_type in SERVICES.items(): - conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};'.format( - account_name, account_key, custom_account_url) + conn_string = 
"DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};".format( + account_name, account_key, custom_account_url + ) # Act - service = service_type[0].from_connection_string( - conn_string, container_name="foo", blob_name="bar") + service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert assert service.account_name == account_name assert service.credential.account_name == account_name assert service.credential.account_key == account_key - assert service.primary_hostname == 'local-machine:11002/custom/account/path' + assert service.primary_hostname == "local-machine:11002/custom/account/path" service = BlobServiceClient(account_url=custom_account_url) assert service.account_name == None assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/?') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/?") service = ContainerClient(account_url=custom_account_url, container_name="foo") assert service.account_name == None assert service.container_name == "foo" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/foo?') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/foo?") service = ContainerClient.from_container_url("http://local-machine:11002/custom/account/path/foo?query=value") assert service.account_name == None assert service.container_name == "foo" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url == 'http://local-machine:11002/custom/account/path/foo' + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url == "http://local-machine:11002/custom/account/path/foo" service = ContainerClient.from_container_url("http://local-machine:11002/custom/account/path/foo/?query=value") assert service.account_name == None assert service.container_name == "foo" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url == 'http://local-machine:11002/custom/account/path/foo' + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url == "http://local-machine:11002/custom/account/path/foo" service = BlobClient(account_url=custom_account_url, container_name="foo", blob_name="bar", snapshot="baz") assert service.account_name == None @@ -530,25 +585,31 @@ def test_create_service_with_custom_account_endpoint_path(self): assert service.blob_name == "bar" assert service.snapshot == "baz" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&") - service = BlobClient.from_blob_url("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value") + service = 
BlobClient.from_blob_url( + "http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value" + ) assert service.account_name == None assert service.container_name == "foo" assert service.blob_name == "bar" assert service.snapshot == "baz" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url == 'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz' + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url == "http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz" def test_create_blob_client_with_sub_directory_path_in_blob_name(self): - blob_url = "https://testaccount.blob.core.windows.net/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + blob_url = ( + "https://testaccount.blob.core.windows.net/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + ) blob_client = BlobClient.from_blob_url(blob_url) assert blob_client.container_name == "containername" assert blob_client.blob_name == "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" - blob_emulator_url = 'http://127.0.0.1:1000/devstoreaccount1/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg' + blob_emulator_url = ( + "http://127.0.0.1:1000/devstoreaccount1/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + ) blob_client = BlobClient.from_blob_url(blob_emulator_url) assert blob_client.container_name == "containername" assert blob_client.blob_name == "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" @@ -562,18 +623,18 @@ def test_from_blob_url_too_short_url(self): def test_create_client_for_emulator(self): container_client = ContainerClient( - account_url='http://127.0.0.1:1000/devstoreaccount1', - container_name='newcontainer', - credential='Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==') + account_url="http://127.0.0.1:1000/devstoreaccount1", + container_name="newcontainer", + credential="Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==", + ) assert container_client.container_name == "newcontainer" assert container_client.account_name == "devstoreaccount1" - ContainerClient.from_container_url('http://127.0.0.1:1000/devstoreaccount1/newcontainer') + ContainerClient.from_container_url("http://127.0.0.1:1000/devstoreaccount1/newcontainer") assert container_client.container_name == "newcontainer" assert container_client.account_name == "devstoreaccount1" - @BlobPreparer() @recorded_by_proxy def test_request_callback_signed_header(self, **kwargs): @@ -582,18 +643,18 @@ def test_request_callback_signed_header(self, **kwargs): # Arrange service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - name = self.get_resource_name('cont') + name = self.get_resource_name("cont") # Act def callback(request): - if request.http_request.method == 'PUT': - request.http_request.headers['x-ms-meta-hello'] = 'world' + if request.http_request.method == "PUT": + request.http_request.headers["x-ms-meta-hello"] = "world" # Assert try: container = service.create_container(name, raw_request_hook=callback) metadata = container.get_container_properties().metadata - assert metadata == {'hello': 'world'} + assert metadata == {"hello": "world"} finally: service.delete_container(name) @@ -605,7 +666,7 @@ def test_response_callback(self, **kwargs): # Arrange service = BlobServiceClient(self.account_url(storage_account_name, "blob"), 
credential=storage_account_key) - name = self.get_resource_name('cont') + name = self.get_resource_name("cont") container = service.get_container_client(name) # Act @@ -626,8 +687,8 @@ def test_user_agent_default(self, **kwargs): service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert "azsdk-python-storage-blob/{}".format(VERSION) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert "azsdk-python-storage-blob/{}".format(VERSION) in response.http_request.headers["User-Agent"] service.get_service_properties(raw_response_hook=callback) @@ -639,23 +700,26 @@ def test_user_agent_custom(self, **kwargs): custom_app = "TestApp/v1.0" service = BlobServiceClient( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, user_agent=custom_app) + self.account_url(storage_account_name, "blob"), credential=storage_account_key, user_agent=custom_app + ) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( + VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] service.get_service_properties(raw_response_hook=callback) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("TestApp/v2.0 TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "TestApp/v2.0 TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( + VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] service.get_service_properties(raw_response_hook=callback, user_agent="TestApp/v2.0") @@ -668,26 +732,29 @@ def test_user_agent_append(self, **kwargs): service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("customer_user_agent azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "customer_user_agent azsdk-python-storage-blob/{} Python/{} ({})".format( + VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] - service.get_service_properties(raw_response_hook=callback, user_agent='customer_user_agent') + service.get_service_properties(raw_response_hook=callback, user_agent="customer_user_agent") @BlobPreparer() def test_error_with_malformed_conn_str(self): # Arrange - for conn_str in ["", "foobar", "foo;bar;baz", ";", "foobar=baz=foo" , "foo=;bar=;", "=", "=;=="]: + for conn_str in ["", "foobar", "foo;bar;baz", ";", "foobar=baz=foo", "foo=;bar=;", "=", "=;=="]: for service_type in SERVICES.items(): # Act with pytest.raises(ValueError) as e: - service = 
service_type[0].from_connection_string(conn_str, blob_name="test", container_name="foo/bar") + service = service_type[0].from_connection_string( + conn_str, blob_name="test", container_name="foo/bar" + ) - if conn_str in("", "foobar", "foo;bar;baz", ";"): + if conn_str in ("", "foobar", "foo;bar;baz", ";"): assert str(e.value) == "Connection string is either blank or malformed." - elif conn_str in ("foobar=baz=foo" , "foo=;bar=;", "=", "=;=="): + elif conn_str in ("foobar=baz=foo", "foo=;bar=;", "=", "=;=="): assert str(e.value) == "Connection string missing required connection details." @BlobPreparer() @@ -699,11 +766,15 @@ def test_closing_pipeline_client(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) # Assert with service: - assert hasattr(service, 'close') + assert hasattr(service, "close") service.close() @BlobPreparer() @@ -715,13 +786,17 @@ def test_closing_pipeline_client_simple(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) service.close() @BlobPreparer() def test_create_configuration_legacy(self, **kwargs): # Arrange - sdk_name = 'Blob-test' + sdk_name = "Blob-test" # Act config = create_configuration(storage_sdk=sdk_name) @@ -731,4 +806,5 @@ def test_create_configuration_legacy(self, **kwargs): assert config.max_block_size == 4 * 1024 * 1024 assert sdk_name in config.user_agent_policy.user_agent + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py index 6cd57fefd5b7..9ea8f8eeba57 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py @@ -15,11 +15,7 @@ ResourceTypes, VERSION, ) -from azure.storage.blob.aio import ( - BlobClient, - ContainerClient, - BlobServiceClient -) +from azure.storage.blob.aio import BlobClient, ContainerClient, BlobServiceClient from devtools_testutils.fake_credentials_async import AsyncFakeCredential from devtools_testutils.aio import recorded_by_proxy_async @@ -27,12 +23,12 @@ from settings.testcase import BlobPreparer SERVICES = { - BlobServiceClient: 'blob', - ContainerClient: 'blob', - BlobClient: 'blob', + BlobServiceClient: "blob", + ContainerClient: "blob", + BlobClient: "blob", } -_CONNECTION_ENDPOINTS = {'blob': 'BlobEndpoint'} -_CONNECTION_ENDPOINTS_SECONDARY = {'blob': 'BlobSecondaryEndpoint'} +_CONNECTION_ENDPOINTS = {"blob": "BlobEndpoint"} +_CONNECTION_ENDPOINTS_SECONDARY = {"blob": "BlobSecondaryEndpoint"} class TestStorageClientAsync(AsyncStorageRecordedTestCase): @@ -43,8 +39,8 @@ def validate_standard_account_endpoints(self, service, url_type, account_name, a assert service.account_name == account_name assert service.credential.account_name == account_name assert service.credential.account_key == account_key - assert '{}.{}.core.windows.net'.format(account_name, url_type) in service.url - assert 
'{}-secondary.{}.core.windows.net'.format(account_name, url_type) in service.secondary_endpoint + assert "{}.{}.core.windows.net".format(account_name, url_type) in service.url + assert "{}-secondary.{}.core.windows.net".format(account_name, url_type) in service.secondary_endpoint def generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -68,26 +64,34 @@ def test_create_service_with_key(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) # Assert self.validate_standard_account_endpoints(service, url, storage_account_name, storage_account_key) - assert service.scheme == 'https' + assert service.scheme == "https" @BlobPreparer() def test_create_service_with_connection_string(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - for service_type in SERVICES.items(): # Act service = service_type[0].from_connection_string( - self.connection_string(storage_account_name, storage_account_key), container_name="test", blob_name="test") + self.connection_string(storage_account_name, storage_account_key), + container_name="test", + blob_name="test", + ) # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'https' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + assert service.scheme == "https" @BlobPreparer() def test_create_service_with_sas(self, **kwargs): @@ -98,12 +102,16 @@ def test_create_service_with_sas(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=sas_token, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=sas_token, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.url.endswith(sas_token) assert service.credential is None @@ -118,12 +126,16 @@ def test_create_service_with_sas_credential(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=sas_credential, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=sas_credential, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert not service.url.endswith(sas_token) assert service.credential == sas_credential @@ -139,7 +151,11 @@ def test_create_service_with_sas_credential_url_raises_if_sas_is_in_uri(self, ** # Act with pytest.raises(ValueError): service = service_type( - self.account_url(storage_account_name, "blob") + "?sig=foo", credential=sas_credential, container_name='foo', blob_name='bar') + 
self.account_url(storage_account_name, "blob") + "?sig=foo", + credential=sas_credential, + container_name="foo", + blob_name="bar", + ) @BlobPreparer() async def test_create_service_with_token(self, **kwargs): @@ -149,11 +165,15 @@ async def test_create_service_with_token(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "blob"), credential=token_credential, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=token_credential, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.credential == token_credential assert service.account_name == storage_account_name @@ -165,8 +185,8 @@ async def test_create_service_with_token_and_http(self, **kwargs): for service_type in SERVICES: # Act with pytest.raises(ValueError): - url = self.account_url(storage_account_name, "blob").replace('https', 'http') - service_type(url, credential=token_credential, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("https", "http") + service_type(url, credential=token_credential, container_name="foo", blob_name="bar") @BlobPreparer() def test_create_service_china(self, **kwargs): @@ -176,9 +196,8 @@ def test_create_service_china(self, **kwargs): # Arrange for service_type in SERVICES.items(): # Act - url = self.account_url(storage_account_name, "blob").replace('core.windows.net', 'core.chinacloudapi.cn') - service = service_type[0]( - url, credential=storage_account_key, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("core.windows.net", "core.chinacloudapi.cn") + service = service_type[0](url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert assert service is not None @@ -186,9 +205,11 @@ def test_create_service_china(self, **kwargs): assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key assert service.primary_endpoint.startswith( - 'https://{}.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) + "https://{}.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) assert service.secondary_endpoint.startswith( - 'https://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) + "https://{}-secondary.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) @BlobPreparer() def test_create_service_protocol(self, **kwargs): @@ -198,13 +219,14 @@ def test_create_service_protocol(self, **kwargs): # Arrange for service_type in SERVICES.items(): # Act - url = self.account_url(storage_account_name, "blob").replace('https', 'http') - service = service_type[0]( - url, credential=storage_account_key, container_name='foo', blob_name='bar') + url = self.account_url(storage_account_name, "blob").replace("https", "http") + service = service_type[0](url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'http' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + 
assert service.scheme == "http" @BlobPreparer() def test_create_blob_service_anonymous(self, **kwargs): @@ -215,11 +237,13 @@ def test_create_blob_service_anonymous(self, **kwargs): for service_type in BLOB_SERVICES: # Act - service = service_type(self.account_url(storage_account_name, "blob"), container_name='foo', blob_name='bar') + service = service_type( + self.account_url(storage_account_name, "blob"), container_name="foo", blob_name="bar" + ) # Assert assert service is not None - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.credential is None assert service.account_name == storage_account_name @@ -234,18 +258,21 @@ def test_create_blob_service_custom_domain(self, **kwargs): for service_type in BLOB_SERVICES: # Act service = service_type( - 'www.mydomain.com', - credential={'account_name': storage_account_name, 'account_key': storage_account_key}, - container_name='foo', - blob_name='bar') + "www.mydomain.com", + credential={"account_name": storage_account_name, "account_key": storage_account_key}, + container_name="foo", + blob_name="bar", + ) # Assert assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + ) @BlobPreparer() def test_create_service_with_socket_timeout(self, **kwargs): @@ -257,14 +284,23 @@ def test_create_service_with_socket_timeout(self, **kwargs): for service_type in SERVICES.items(): # Act default_service = service_type[0]( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, - container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) service = service_type[0]( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, - container_name='foo', blob_name='bar', connection_timeout=22) + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + connection_timeout=22, + ) # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] @@ -275,16 +311,17 @@ def test_create_service_with_connection_string_key(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - conn_string = 'AccountName={};AccountKey={};'.format(storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};".format(storage_account_name, storage_account_key) for service_type in SERVICES.items(): # Act - service = service_type[0].from_connection_string( - conn_string, 
container_name='foo', blob_name='bar') + service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert - self.validate_standard_account_endpoints(service, service_type[1], storage_account_name, storage_account_key) - assert service.scheme == 'https' + self.validate_standard_account_endpoints( + service, service_type[1], storage_account_name, storage_account_key + ) + assert service.scheme == "https" @BlobPreparer() def test_create_service_with_connection_string_sas(self, **kwargs): @@ -292,16 +329,15 @@ def test_create_service_with_connection_string_sas(self, **kwargs): # Arrange sas_token = self.generate_fake_sas_token() - conn_string = 'AccountName={};SharedAccessSignature={};'.format(storage_account_name, sas_token) + conn_string = "AccountName={};SharedAccessSignature={};".format(storage_account_name, sas_token) for service_type in SERVICES: # Act - service = service_type.from_connection_string( - conn_string, container_name='foo', blob_name='bar') + service = service_type.from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert assert service is not None - assert service.url.startswith('https://' + storage_account_name + '.blob.core.windows.net') + assert service.url.startswith("https://" + storage_account_name + ".blob.core.windows.net") assert service.url.endswith(sas_token) assert service.credential is None assert service.account_name == storage_account_name @@ -313,12 +349,12 @@ def test_create_blob_client_with_complete_blob_url(self, **kwargs): # Arrange blob_url = self.account_url(storage_account_name, "blob") + "/foourl/barurl" - service = BlobClient(blob_url, credential=storage_account_key, container_name='foo', blob_name='bar') + service = BlobClient(blob_url, credential=storage_account_key, container_name="foo", blob_name="bar") # Assert - assert service.scheme == 'https' - assert service.container_name == 'foo' - assert service.blob_name == 'bar' + assert service.scheme == "https" + assert service.container_name == "foo" + assert service.blob_name == "bar" assert service.account_name == storage_account_name @BlobPreparer() @@ -327,8 +363,11 @@ def test_creat_serv_w_connstr_endpoint_protocol(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - conn_string = 'AccountName={};AccountKey={};DefaultEndpointsProtocol=http;EndpointSuffix=core.chinacloudapi.cn;'.format( - storage_account_name, storage_account_key) + conn_string = ( + "AccountName={};AccountKey={};DefaultEndpointsProtocol=http;EndpointSuffix=core.chinacloudapi.cn;".format( + storage_account_name, storage_account_key + ) + ) for service_type in SERVICES.items(): # Act @@ -340,10 +379,12 @@ def test_creat_serv_w_connstr_endpoint_protocol(self, **kwargs): assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key assert service.primary_endpoint.startswith( - 'http://{}.{}.core.chinacloudapi.cn/'.format(storage_account_name, service_type[1])) + "http://{}.{}.core.chinacloudapi.cn/".format(storage_account_name, service_type[1]) + ) assert service.secondary_endpoint.startswith( - 'http://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account_name, service_type[1])) - assert service.scheme == 'http' + "http://{}-secondary.{}.core.chinacloudapi.cn".format(storage_account_name, service_type[1]) + ) + assert service.scheme == "http" @BlobPreparer() def test_create_service_with_connection_string_emulated(self, **kwargs): @@ -352,7 +393,7 @@ def 
test_create_service_with_connection_string_emulated(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'UseDevelopmentStorage=true;'.format(storage_account_name, storage_account_key) + conn_string = "UseDevelopmentStorage=true;" # Act with pytest.raises(ValueError): @@ -362,7 +403,7 @@ def test_create_service_with_connection_string_emulated(self, **kwargs): def test_create_service_with_connection_string_anonymous(self): # Arrange for service_type in SERVICES.items(): - conn_string = 'BlobEndpoint=www.mydomain.com;' + conn_string = "BlobEndpoint=www.mydomain.com;" # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -371,7 +412,7 @@ def test_create_service_with_connection_string_anonymous(self): assert service is not None assert service.account_name == None assert service.credential is None - assert service.primary_endpoint.startswith('https://www.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") with pytest.raises(ValueError): service.secondary_endpoint @@ -382,8 +423,9 @@ def test_creat_serv_w_connstr_custm_domain(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -393,8 +435,10 @@ def test_creat_serv_w_connstr_custm_domain(self, **kwargs): assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + ) @BlobPreparer() def test_creat_serv_w_connstr_custm_dom_trailing_slash(self, **kwargs): @@ -403,8 +447,9 @@ def test_creat_serv_w_connstr_custm_dom_trailing_slash(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -414,8 +459,10 @@ def test_creat_serv_w_connstr_custm_dom_trailing_slash(self, **kwargs): assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://' + storage_account_name + '-secondary.blob.core.windows.net') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith( + "https://" + storage_account_name + "-secondary.blob.core.windows.net" + 
) @BlobPreparer() def test_creat_serv_w_connstr_custm_dom_2ndry_override(self, **kwargs): @@ -424,20 +471,22 @@ def test_creat_serv_w_connstr_custm_dom_2ndry_override(self, **kwargs): # Arrange for service_type in SERVICES.items(): - conn_string = 'AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;'.format( - storage_account_name, storage_account_key) + conn_string = "AccountName={};AccountKey={};BlobEndpoint=www.mydomain.com/;".format( + storage_account_name, storage_account_key + ) # Act service = service_type[0].from_connection_string( - conn_string, secondary_hostname="www-sec.mydomain.com", container_name="foo", blob_name="bar") + conn_string, secondary_hostname="www-sec.mydomain.com", container_name="foo", blob_name="bar" + ) # Assert assert service is not None assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://www-sec.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith("https://www-sec.mydomain.com/") @BlobPreparer() def test_creat_serv_w_connstr_fail_if_2ndry_wo_primary(self, **kwargs): @@ -446,9 +495,9 @@ def test_creat_serv_w_connstr_fail_if_2ndry_wo_primary(self, **kwargs): for service_type in SERVICES.items(): # Arrange - conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;'.format( - storage_account_name, storage_account_key, - _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1])) + conn_string = "AccountName={};AccountKey={};{}=www.mydomain.com;".format( + storage_account_name, storage_account_key, _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]) + ) # Act @@ -463,11 +512,12 @@ def test_creat_serv_w_connstr_pass_if_2ndry_w_primary(self, **kwargs): for service_type in SERVICES.items(): # Arrange - conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;{}=www-sec.mydomain.com;'.format( + conn_string = "AccountName={};AccountKey={};{}=www.mydomain.com;{}=www-sec.mydomain.com;".format( storage_account_name, storage_account_key, _CONNECTION_ENDPOINTS.get(service_type[1]), - _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1])) + _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]), + ) # Act service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") @@ -477,8 +527,8 @@ def test_creat_serv_w_connstr_pass_if_2ndry_w_primary(self, **kwargs): assert service.account_name == storage_account_name assert service.credential.account_name == storage_account_name assert service.credential.account_key == storage_account_key - assert service.primary_endpoint.startswith('https://www.mydomain.com/') - assert service.secondary_endpoint.startswith('https://www-sec.mydomain.com/') + assert service.primary_endpoint.startswith("https://www.mydomain.com/") + assert service.secondary_endpoint.startswith("https://www-sec.mydomain.com/") def test_create_service_with_custom_account_endpoint_path(self): account_name = "blobstorage" @@ -486,38 +536,38 @@ def test_create_service_with_custom_account_endpoint_path(self): sas_token = self.generate_fake_sas_token() custom_account_url = "http://local-machine:11002/custom/account/path/" + sas_token for service_type in SERVICES.items(): - conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};'.format( - account_name, 
account_key, custom_account_url) + conn_string = "DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};".format( + account_name, account_key, custom_account_url + ) # Act - service = service_type[0].from_connection_string( - conn_string, container_name="foo", blob_name="bar") + service = service_type[0].from_connection_string(conn_string, container_name="foo", blob_name="bar") # Assert assert service.account_name == account_name assert service.credential.account_name == account_name assert service.credential.account_key == account_key - assert service.primary_hostname == 'local-machine:11002/custom/account/path' + assert service.primary_hostname == "local-machine:11002/custom/account/path" service = BlobServiceClient(account_url=custom_account_url) assert service.account_name == None assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/?') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/?") service = ContainerClient(account_url=custom_account_url, container_name="foo") assert service.account_name == None assert service.container_name == "foo" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/foo?') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/foo?") service = ContainerClient.from_container_url("http://local-machine:11002/custom/account/path/foo?query=value") assert service.account_name == None assert service.container_name == "foo" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url == 'http://local-machine:11002/custom/account/path/foo' + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url == "http://local-machine:11002/custom/account/path/foo" service = BlobClient(account_url=custom_account_url, container_name="foo", blob_name="bar", snapshot="baz") assert service.account_name == None @@ -525,25 +575,31 @@ def test_create_service_with_custom_account_endpoint_path(self): assert service.blob_name == "bar" assert service.snapshot == "baz" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url.startswith('http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&') + assert service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url.startswith("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&") - service = BlobClient.from_blob_url("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value") + service = BlobClient.from_blob_url( + "http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value" + ) assert service.account_name == None assert service.container_name == "foo" assert service.blob_name == "bar" assert service.snapshot == "baz" assert service.credential == None - assert service.primary_hostname == 'local-machine:11002/custom/account/path' - assert service.url == 'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz' + assert 
service.primary_hostname == "local-machine:11002/custom/account/path" + assert service.url == "http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz" def test_create_blob_client_with_sub_directory_path_in_blob_name(self): - blob_url = "https://testaccount.blob.core.windows.net/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + blob_url = ( + "https://testaccount.blob.core.windows.net/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + ) blob_client = BlobClient.from_blob_url(blob_url) assert blob_client.container_name == "containername" assert blob_client.blob_name == "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" - blob_emulator_url = 'http://127.0.0.1:1000/devstoreaccount1/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg' + blob_emulator_url = ( + "http://127.0.0.1:1000/devstoreaccount1/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" + ) blob_client = BlobClient.from_blob_url(blob_emulator_url) assert blob_client.container_name == "containername" assert blob_client.blob_name == "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg" @@ -557,18 +613,18 @@ async def test_request_callback_signed_header(self, **kwargs): # Arrange service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - name = self.get_resource_name('cont') + name = self.get_resource_name("cont") # Act def callback(request): - if request.http_request.method == 'PUT': - request.http_request.headers['x-ms-meta-hello'] = 'world' + if request.http_request.method == "PUT": + request.http_request.headers["x-ms-meta-hello"] = "world" # Assert try: container = await service.create_container(name, raw_request_hook=callback) metadata = (await container.get_container_properties()).metadata - assert metadata == {'hello': 'world'} + assert metadata == {"hello": "world"} finally: await service.delete_container(name) @@ -580,7 +636,7 @@ async def test_response_callback(self, **kwargs): # Arrange service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - name = self.get_resource_name('cont') + name = self.get_resource_name("cont") container = service.get_container_client(name) # Act @@ -601,8 +657,8 @@ async def test_user_agent_default(self, **kwargs): service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert "azsdk-python-storage-blob/{}".format(VERSION) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert "azsdk-python-storage-blob/{}".format(VERSION) in response.http_request.headers["User-Agent"] await service.get_service_properties(raw_response_hook=callback) @@ -614,23 +670,26 @@ async def test_user_agent_custom(self, **kwargs): custom_app = "TestApp/v1.0" service = BlobServiceClient( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, user_agent=custom_app) + self.account_url(storage_account_name, "blob"), credential=storage_account_key, user_agent=custom_app + ) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( + 
VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] await service.get_service_properties(raw_response_hook=callback) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("TestApp/v2.0 TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "TestApp/v2.0 TestApp/v1.0 azsdk-python-storage-blob/{} Python/{} ({})".format( + VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] await service.get_service_properties(raw_response_hook=callback, user_agent="TestApp/v2.0") @@ -643,13 +702,14 @@ async def test_user_agent_append(self, **kwargs): service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) def callback(response): - assert 'User-Agent' in response.http_request.headers - assert ("customer_user_agent azsdk-python-storage-blob/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform())) in response.http_request.headers['User-Agent'] + assert "User-Agent" in response.http_request.headers + assert ( + "customer_user_agent azsdk-python-storage-blob/{} Python/{} ({})".format( + VERSION, platform.python_version(), platform.platform() + ) + ) in response.http_request.headers["User-Agent"] - await service.get_service_properties(raw_response_hook=callback, user_agent='customer_user_agent') + await service.get_service_properties(raw_response_hook=callback, user_agent="customer_user_agent") @BlobPreparer() async def test_closing_pipeline_client(self, **kwargs): @@ -661,11 +721,15 @@ async def test_closing_pipeline_client(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) # Assert async with service: - assert hasattr(service, 'close') + assert hasattr(service, "close") await service.close() @BlobPreparer() @@ -678,7 +742,12 @@ async def test_closing_pipeline_client_simple(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "blob"), credential=storage_account_key, container_name='foo', blob_name='bar') + self.account_url(storage_account_name, "blob"), + credential=storage_account_key, + container_name="foo", + blob_name="bar", + ) await service.close() + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py index f7eeadde11de..9dddefecd164 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py @@ -29,16 +29,15 @@ # ------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'encryption_container' -TEST_BLOB_PREFIXES = {'BlockBlob': 'encryption_block_blob', - 'PageBlob': 'encryption_page_blob', - 'AppendBlob': 'foo'} -_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = 'The require_encryption flag is set, but encryption is not supported' + \ - ' for this method.' 
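An aside on the constant rewrite in the hunk below: the removed lines build _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with a backslash continuation, while the added lines wrap the same concatenation in parentheses, the form Black emits. Both spellings, including plain implicit literal concatenation, produce the identical string; a minimal sketch of the equivalence (msg_a and msg_b are illustrative names, not from the test file):

# Backslash continuation (the old spelling in this file).
msg_a = "The require_encryption flag is set, but encryption is not supported" + \
        " for this method."

# Parenthesized form (the new spelling). Adjacent string literals are also
# concatenated at compile time, so the "+" operator itself is optional here.
msg_b = (
    "The require_encryption flag is set, but encryption is not supported"
    " for this method."
)

assert msg_a == msg_b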
+TEST_CONTAINER_PREFIX = "encryption_container" +TEST_BLOB_PREFIXES = {"BlockBlob": "encryption_block_blob", "PageBlob": "encryption_page_blob", "AppendBlob": "foo"} +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + "The require_encryption flag is set, but encryption is not supported" + " for this method." +) # ------------------------------------------------------------------------------ -@mock.patch('os.urandom', mock_urandom) +@mock.patch("os.urandom", mock_urandom) class TestStorageBlobEncryption(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self, storage_account_name, key): @@ -49,11 +48,12 @@ def _setup(self, storage_account_name, key): max_block_size=4 * 1024, max_page_size=4 * 1024, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") self.blob_types = (BlobType.BlockBlob, BlobType.PageBlob, BlobType.AppendBlob) - self.bytes = b'Foo' + self.bytes = b"Foo" if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -83,7 +83,7 @@ def test_missing_attribute_kek_wrap(self, **kwargs): self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self.bsc.require_encryption = True - valid_key = KeyWrapper('key1') + valid_key = KeyWrapper("key1") # Act invalid_key_1 = lambda: None # functions are objects, so this effectively creates an empty object @@ -117,21 +117,21 @@ def test_invalid_value_kek_wrap(self, **kwargs): self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.get_key_wrap_algorithm = None try: self._create_small_blob(BlobType.BlockBlob) self.fail() except AttributeError as e: - assert str(e), _ERROR_OBJECT_INVALID.format('key encryption key' == 'get_key_wrap_algorithm') + assert str(e) == _ERROR_OBJECT_INVALID.format("key encryption key", "get_key_wrap_algorithm") - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.get_kid = None with pytest.raises(AttributeError): self._create_small_blob(BlobType.BlockBlob) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.wrap_key = None with pytest.raises(AttributeError): self._create_small_blob(BlobType.BlockBlob) @@ -144,7 +144,7 @@ def test_missing_attribute_kek_unwrap(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - valid_key = KeyWrapper('key1') + valid_key = KeyWrapper("key1") self.bsc.key_encryption_key = valid_key blob = self._create_small_blob(BlobType.BlockBlob) @@ -173,16 +173,16 @@ def test_invalid_value_kek_unwrap(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = self._create_small_blob(BlobType.BLOCKBLOB) # Act - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.key_encryption_key.unwrap_key = None with pytest.raises(HttpResponseError) as e: blob.download_blob().readall() 
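For context on what the wrap/unwrap tests in this file probe: the client-side encryption layer duck-types the key-encryption key, calling wrap_key, get_key_wrap_algorithm, and get_kid on upload and unwrap_key plus get_kid on download, which is why nulling any one of them above raises AttributeError. A minimal sketch of an object meeting that contract, assuming a KeyWrapper-like shape; the XOR "wrap" and the "A256KW" label are illustrative placeholders, not the test helper's real implementation, and not secure:

import os

class MinimalKEK:
    """Duck-typed key-encryption key; the SDK only requires these methods."""

    def __init__(self, kid: str):
        self.kid = kid
        self._secret = os.urandom(32)  # illustrative key material

    def get_kid(self) -> str:
        return self.kid  # recorded in blob metadata so downloads pick the right key

    def get_key_wrap_algorithm(self) -> str:
        return "A256KW"  # algorithm name stored with the wrapped CEK (assumed label)

    def wrap_key(self, cek: bytes) -> bytes:
        return bytes(a ^ b for a, b in zip(cek, self._secret))  # toy XOR wrap, NOT secure

    def unwrap_key(self, wrapped: bytes, algorithm: str) -> bytes:
        return bytes(a ^ b for a, b in zip(wrapped, self._secret))  # XOR is its own inverse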
- assert 'Decryption failed.' in str(e.value.message) + assert "Decryption failed." in str(e.value.message) @BlobPreparer() @recorded_by_proxy @@ -192,7 +192,7 @@ def test_get_blob_kek(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = self._create_small_blob(BlobType.BlockBlob) # Act @@ -209,7 +209,7 @@ def test_get_blob_resolver(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") key_resolver = KeyResolver() key_resolver.put_key(self.bsc.key_encryption_key) self.bsc.key_resolver_function = key_resolver.resolve_key @@ -232,7 +232,7 @@ def test_get_blob_kek_RSA(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = RSAKeyWrapper('key2') + self.bsc.key_encryption_key = RSAKeyWrapper("key2") blob = self._create_small_blob(BlobType.BlockBlob) # Act @@ -249,16 +249,16 @@ def test_get_blob_nonmatching_kid(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = self._create_small_blob(BlobType.BLOCKBLOB) # Act - self.bsc.key_encryption_key.kid = 'Invalid' + self.bsc.key_encryption_key.kid = "Invalid" # Assert with pytest.raises(HttpResponseError) as e: blob.download_blob().readall() - assert 'Decryption failed.' in str(e.value.message) + assert "Decryption failed." in str(e.value.message) @BlobPreparer() @recorded_by_proxy @@ -268,9 +268,9 @@ def test_put_blob_invalid_stream_type(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') - small_stream = StringIO(u'small') - large_stream = StringIO(u'large' * self.config.max_single_put_size) + self.bsc.key_encryption_key = KeyWrapper("key1") + small_stream = StringIO("small") + large_stream = StringIO("large" * self.config.max_single_put_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -278,12 +278,12 @@ def test_put_blob_invalid_stream_type(self, **kwargs): # Block blob specific single shot with pytest.raises(TypeError) as e: blob.upload_blob(small_stream, length=5) - assert 'Blob data should be of type bytes.' in str(e.value) + assert "Blob data should be of type bytes." in str(e.value) # Generic blob chunked with pytest.raises(TypeError) as e: blob.upload_blob(large_stream) - assert 'Blob data should be of type bytes.' in str(e.value) + assert "Blob data should be of type bytes." 
in str(e.value) @pytest.mark.live_test_only @BlobPreparer() @@ -293,10 +293,9 @@ def test_put_blob_chunking_required_mult_of_block_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = self.get_random_bytes( - self.config.max_single_put_size + self.config.max_block_size) + content = self.get_random_bytes(self.config.max_single_put_size + self.config.max_block_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -315,7 +314,7 @@ def test_put_blob_chunking_required_non_mult_of_block_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = urandom(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -336,21 +335,18 @@ def test_put_blob_chunking_required_range_specified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size * 2) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - blob.upload_blob( - content, - length=self.config.max_single_put_size + 53, - max_concurrency=3) + blob.upload_blob(content, length=self.config.max_single_put_size + 53, max_concurrency=3) blob_content = blob.download_blob(max_concurrency=3).readall() # Assert - assert content[:self.config.max_single_put_size + 53] == blob_content + assert content[: self.config.max_single_put_size + 53] == blob_content @BlobPreparer() @recorded_by_proxy @@ -359,9 +355,9 @@ def test_put_block_blob_single_shot(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = b'small' + content = b"small" blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -380,8 +376,8 @@ def test_put_blob_range(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') - content = b'Random repeats' * self.config.max_single_put_size * 5 + self.bsc.key_encryption_key = KeyWrapper("key1") + content = b"Random repeats" * self.config.max_single_put_size * 5 # All page blob uploads call _upload_chunks, so this will test the ability # of that function to handle ranges even though it's a small blob @@ -389,14 +385,11 @@ def test_put_blob_range(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - blob.upload_blob( - content[2:], - length=self.config.max_single_put_size + 5, - max_concurrency=1) + blob.upload_blob(content[2:], length=self.config.max_single_put_size + 5, max_concurrency=1) blob_content = blob.download_blob().readall() # Assert - 
assert content[2:2 + self.config.max_single_put_size + 5] == blob_content + assert content[2 : 2 + self.config.max_single_put_size + 5] == blob_content @BlobPreparer() @recorded_by_proxy @@ -405,9 +398,9 @@ def test_put_blob_empty(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = b'' + content = b"" blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -425,7 +418,7 @@ def test_put_blob_serial_upload_chunking(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -445,7 +438,7 @@ def test_get_blob_range_beginning_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -465,7 +458,7 @@ def test_get_blob_range_middle_to_end(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -487,7 +480,7 @@ def test_get_blob_range_middle_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -507,7 +500,7 @@ def test_get_blob_range_aligns_on_16_byte_block(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -527,7 +520,7 @@ def test_get_blob_range_expanded_to_beginning_block_align(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -547,7 +540,7 @@ def test_get_blob_range_expanded_to_beginning_iv(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = 
True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -567,10 +560,10 @@ def test_get_blob_range_cross_chunk(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - data = b'12345' * 205 * 3 # 3075 bytes + data = b"12345" * 205 * 3 # 3075 bytes blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -580,7 +573,7 @@ def test_get_blob_range_cross_chunk(self, **kwargs): blob_content = blob.download_blob(offset=offset, length=length).readall() # Assert - assert data[offset:offset + length] == blob_content + assert data[offset : offset + length] == blob_content @BlobPreparer() @recorded_by_proxy @@ -590,7 +583,7 @@ def test_put_blob_strict_mode(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - content = b'Hello world' + content = b"Hello world" # Assert for service in self.blob_types: @@ -611,7 +604,7 @@ def test_put_blob_strict_mode(self, **kwargs): blob.upload_blob(temp_file, blob_type=service) with pytest.raises(ValueError): - blob.upload_blob('To encrypt', blob_type=service) + blob.upload_blob("To encrypt", blob_type=service) @BlobPreparer() @recorded_by_proxy @@ -621,7 +614,7 @@ def test_get_blob_strict_mode_no_policy(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = self._create_small_blob(BlobType.BlockBlob) # Act @@ -642,7 +635,7 @@ def test_get_blob_strict_mode_unencrypted_blob(self, **kwargs): # Act blob.require_encryption = True - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") # Assert with pytest.raises(HttpResponseError): @@ -655,17 +648,17 @@ def test_invalid_methods_fail_block(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - blob.stage_block('block1', b'hello world') + blob.stage_block("block1", b"hello world") assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: - blob.commit_block_list(['block1']) + blob.commit_block_list(["block1"]) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION @BlobPreparer() @@ -675,13 +668,13 @@ def test_invalid_methods_fail_append(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.AppendBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - blob.append_block(b'hello world') + blob.append_block(b"hello world") assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: @@ -690,7 +683,7 @@ def 
test_invalid_methods_fail_append(self, **kwargs): # All append_from operations funnel into append_from_stream, so testing one is sufficient with pytest.raises(ValueError) as e: - blob.upload_blob(b'To encrypt', blob_type=BlobType.AppendBlob) + blob.upload_blob(b"To encrypt", blob_type=BlobType.AppendBlob) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION @BlobPreparer() @@ -700,13 +693,13 @@ def test_invalid_methods_fail_page(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.PageBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - blob.upload_page(b'a' * 512, offset=0, length=512) + blob.upload_page(b"a" * 512, offset=0, length=512) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: @@ -721,7 +714,7 @@ def test_validate_encryption(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.bsc.key_encryption_key = kek blob = self._create_small_blob(BlobType.BlockBlob) @@ -731,7 +724,7 @@ def test_validate_encryption(self, **kwargs): content = blob.download_blob() data = content.readall() - encryption_data = _dict_to_encryption_data(loads(content.properties.metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(content.properties.metadata["encryptiondata"])) iv = encryption_data.content_encryption_IV content_encryption_key = _validate_and_unwrap_cek(encryption_data, kek, None) cipher = _generate_AES_CBC_cipher(content_encryption_key, iv) @@ -760,7 +753,7 @@ def test_create_block_blob_from_star(self, **kwargs): temp_file.seek(0) self._create_blob_from_star(BlobType.BlockBlob, self.bytes, temp_file) - self._create_blob_from_star(BlobType.BlockBlob, b'To encrypt', 'To encrypt') + self._create_blob_from_star(BlobType.BlockBlob, b"To encrypt", "To encrypt") @BlobPreparer() @recorded_by_proxy @@ -780,7 +773,7 @@ def test_create_page_blob_from_star(self, **kwargs): stream.write(content) stream.close() - with open(path_name, 'rb') as stream: + with open(path_name, "rb") as stream: self._create_blob_from_star(BlobType.PageBlob, content, stream) unlink(stream.name) @@ -788,7 +781,7 @@ def test_create_page_blob_from_star(self, **kwargs): def _create_blob_from_star(self, blob_type, content, data, **kwargs): blob_name = self._get_blob_reference(blob_type) blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.require_encryption = True blob.upload_blob(data, blob_type=blob_type, **kwargs) @@ -804,7 +797,7 @@ def test_get_blob_to_star(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = self._create_small_blob(BlobType.BlockBlob) # Act @@ -813,7 +806,7 @@ def test_get_blob_to_star(self, **kwargs): stream_blob = BytesIO() blob.download_blob().download_to_stream(stream_blob) stream_blob.seek(0) - text_blob = blob.download_blob(encoding='UTF-8').readall() + text_blob = blob.download_blob(encoding="UTF-8").readall() # Assert assert self.bytes == iter_blob @@ -829,9 
+822,9 @@ def test_get_blob_read(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") - data = b'12345' * 205 * 25 # 25625 bytes + data = b"12345" * 205 * 25 # 25625 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference(BlobType.BLOCKBLOB)) blob.upload_blob(data, overwrite=True) stream = blob.download_blob(max_concurrency=3) @@ -858,9 +851,9 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference(BlobType.BLOCKBLOB)) blob.upload_blob(data, overwrite=True) offset, length = 501, 5000 @@ -870,15 +863,15 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = stream.read(100) # Read in first chunk second = stream.readall() - assert first == data[offset:offset + 100] - assert second == data[offset + 100:offset + length] + assert first == data[offset : offset + 100] + assert second == data[offset + 100 : offset + length] stream = blob.download_blob(offset=offset, length=length) first = stream.read(3000) # Read past first chunk second = stream.readall() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] stream = blob.download_blob(offset=offset, length=length) first = stream.read(3000) # Read past first chunk @@ -886,8 +879,9 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_size = stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] assert read_size == len(second) + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py index d715060ef6e8..dd38f7e3f3db 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py @@ -30,16 +30,15 @@ # ------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'encryption_container' -TEST_BLOB_PREFIXES = {'BlockBlob': 'encryption_block_blob', - 'PageBlob': 'encryption_page_blob', - 'AppendBlob': 'foo'} -_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = 'The require_encryption flag is set, but encryption is not supported' + \ - ' for this method.' +TEST_CONTAINER_PREFIX = "encryption_container" +TEST_BLOB_PREFIXES = {"BlockBlob": "encryption_block_blob", "PageBlob": "encryption_page_blob", "AppendBlob": "foo"} +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + "The require_encryption flag is set, but encryption is not supported" + " for this method." 
+) # ------------------------------------------------------------------------------ -@mock.patch('os.urandom', mock_urandom) +@mock.patch("os.urandom", mock_urandom) class TestStorageBlobEncryptionAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- @@ -53,11 +52,12 @@ async def _setup(self, storage_account_name, key): max_single_put_size=32 * 1024, max_block_size=4 * 1024, max_page_size=4 * 1024, - max_single_get_size=4 * 1024) + max_single_get_size=4 * 1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") self.blob_types = (BlobType.BlockBlob, BlobType.PageBlob, BlobType.AppendBlob) - self.bytes = b'Foo' + self.bytes = b"Foo" if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -87,7 +87,7 @@ async def test_missing_attribute_kek_wrap(self, **kwargs): self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self.bsc.require_encryption = True - valid_key = KeyWrapper('key1') + valid_key = KeyWrapper("key1") # Act invalid_key_1 = lambda: None # functions are objects, so this effectively creates an empty object @@ -121,21 +121,21 @@ async def test_invalid_value_kek_wrap(self, **kwargs): self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.get_key_wrap_algorithm = None try: await self._create_small_blob(BlobType.BlockBlob) self.fail() except AttributeError as e: - assert str(e), _ERROR_OBJECT_INVALID.format('key encryption key' == 'get_key_wrap_algorithm') + assert str(e) == _ERROR_OBJECT_INVALID.format("key encryption key", "get_key_wrap_algorithm") - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.get_kid = None with pytest.raises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.key_encryption_key.wrap_key = None with pytest.raises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) @@ -148,24 +148,24 @@ async def test_missing_attribute_kek_unwrap(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - valid_key = KeyWrapper('key1') + valid_key = KeyWrapper("key1") self.bsc.key_encryption_key = valid_key blob = await self._create_small_blob(BlobType.BlockBlob) # Act # Note that KeyWrapper has a default value for key_id, so these Exceptions # are not due to non_matching kids. 
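A note on the lambda idiom recurring in these missing-attribute tests: a Python function is an ordinary object with a writable attribute dictionary, so assigning methods onto a no-op lambda fabricates a partial key object without declaring a class. With only half of the download-side contract present, the SDK's key validation fails and, as the assertions here show, surfaces as HttpResponseError. A small self-contained illustration (invalid_key is a hypothetical name mirroring the tests):

invalid_key = lambda: None             # "empty object": callable, but never called
invalid_key.get_kid = lambda: "key1"   # supply get_kid only

assert hasattr(invalid_key, "get_kid")
assert not hasattr(invalid_key, "unwrap_key")  # the other half of the unwrap contract
# The encryption layer rejects such a key with AttributeError internally,
# which the download call reports as HttpResponseError in the tests above.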
- invalid_key_1 = lambda: None #functions are objects, so this effectively creates an empty object + invalid_key_1 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_1.get_kid = valid_key.get_kid - #No attribute unwrap_key + # No attribute unwrap_key blob.key_encryption_key = invalid_key_1 with pytest.raises(HttpResponseError): await (await blob.download_blob()).readall() - invalid_key_2 = lambda: None #functions are objects, so this effectively creates an empty object + invalid_key_2 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_2.unwrap_key = valid_key.unwrap_key blob.key_encryption_key = invalid_key_2 - #No attribute get_kid + # No attribute get_kid with pytest.raises(HttpResponseError): await (await blob.download_blob()).readall() @@ -177,16 +177,16 @@ async def test_invalid_value_kek_unwrap(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = await self._create_small_blob(BlobType.BLOCKBLOB) # Act - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.key_encryption_key.unwrap_key = None with pytest.raises(HttpResponseError) as e: await (await blob.download_blob()).readall() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." in str(e.value) @BlobPreparer() @recorded_by_proxy_async @@ -196,7 +196,7 @@ async def test_get_blob_kek(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = await self._create_small_blob(BlobType.BlockBlob) # Act @@ -213,7 +213,7 @@ async def test_get_blob_resolver(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") key_resolver = KeyResolver() key_resolver.put_key(self.bsc.key_encryption_key) self.bsc.key_resolver_function = key_resolver.resolve_key @@ -236,7 +236,7 @@ async def test_get_blob_kek_RSA(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = RSAKeyWrapper('key2') + self.bsc.key_encryption_key = RSAKeyWrapper("key2") blob = await self._create_small_blob(BlobType.BlockBlob) # Act @@ -256,16 +256,16 @@ async def test_get_blob_nonmatching_kid(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = await self._create_small_blob(BlobType.BLOCKBLOB) # Act - self.bsc.key_encryption_key.kid = 'Invalid' + self.bsc.key_encryption_key.kid = "Invalid" # Assert with pytest.raises(HttpResponseError) as e: await (await blob.download_blob()).readall() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." 
in str(e.value) @BlobPreparer() @recorded_by_proxy_async @@ -275,9 +275,9 @@ async def test_put_blob_invalid_stream_type(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') - small_stream = StringIO(u'small') - large_stream = StringIO(u'large' * self.config.max_single_put_size) + self.bsc.key_encryption_key = KeyWrapper("key1") + small_stream = StringIO("small") + large_stream = StringIO("large" * self.config.max_single_put_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -285,12 +285,12 @@ async def test_put_blob_invalid_stream_type(self, **kwargs): # Block blob specific single shot with pytest.raises(TypeError) as e: await blob.upload_blob(small_stream, length=5) - assert 'Blob data should be of type bytes.' in str(e.value) + assert "Blob data should be of type bytes." in str(e.value) # Generic blob chunked with pytest.raises(TypeError) as e: await blob.upload_blob(large_stream) - assert 'Blob data should be of type bytes.' in str(e.value) + assert "Blob data should be of type bytes." in str(e.value) @pytest.mark.live_test_only @BlobPreparer() @@ -300,10 +300,9 @@ async def test_put_blob_chunking_required_mult_of_block_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = self.get_random_bytes( - self.config.max_single_put_size + self.config.max_block_size) + content = self.get_random_bytes(self.config.max_single_put_size + self.config.max_block_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -322,7 +321,7 @@ async def test_put_blob_chunking_required_non_mult_of_block_size(self, **kwargs) storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = urandom(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -343,21 +342,18 @@ async def test_put_blob_chunking_required_range_specified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size * 2) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - await blob.upload_blob( - content, - length=self.config.max_single_put_size + 53, - max_concurrency=3) + await blob.upload_blob(content, length=self.config.max_single_put_size + 53, max_concurrency=3) blob_content = await (await blob.download_blob(max_concurrency=3)).readall() # Assert - assert content[:self.config.max_single_put_size + 53] == blob_content + assert content[: self.config.max_single_put_size + 53] == blob_content @BlobPreparer() @recorded_by_proxy_async @@ -366,9 +362,9 @@ async def test_put_block_blob_single_shot(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await 
self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = b'small' + content = b"small" blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -387,8 +383,8 @@ async def test_put_blob_range(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') - content = b'Random repeats' * self.config.max_single_put_size * 5 + self.bsc.key_encryption_key = KeyWrapper("key1") + content = b"Random repeats" * self.config.max_single_put_size * 5 # All page blob uploads call _upload_chunks, so this will test the ability # of that function to handle ranges even though it's a small blob @@ -396,14 +392,11 @@ async def test_put_blob_range(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - await blob.upload_blob( - content[2:], - length=self.config.max_single_put_size + 5, - max_concurrency=1) + await blob.upload_blob(content[2:], length=self.config.max_single_put_size + 5, max_concurrency=1) blob_content = await (await blob.download_blob()).readall() # Assert - assert content[2:2 + self.config.max_single_put_size + 5] == blob_content + assert content[2 : 2 + self.config.max_single_put_size + 5] == blob_content @BlobPreparer() @recorded_by_proxy_async @@ -412,9 +405,9 @@ async def test_put_blob_empty(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - content = b'' + content = b"" blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -432,7 +425,7 @@ async def test_put_blob_serial_upload_chunking(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -452,7 +445,7 @@ async def test_get_blob_range_beginning_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -472,7 +465,7 @@ async def test_get_blob_range_middle_to_end(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -494,7 +487,7 @@ async def test_get_blob_range_middle_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = 
KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -514,7 +507,7 @@ async def test_get_blob_range_aligns_on_16_byte_block(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -534,7 +527,7 @@ async def test_get_blob_range_expnded_to_begin_bloc_align(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -554,7 +547,7 @@ async def test_get_blob_range_expanded_to_beginning_iv(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) @@ -574,10 +567,10 @@ async def test_get_blob_range_cross_chunk(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc.require_encryption = True - data = b'12345' * 205 * 3 # 3075 bytes + data = b"12345" * 205 * 3 # 3075 bytes blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(data, overwrite=True) @@ -587,7 +580,7 @@ async def test_get_blob_range_cross_chunk(self, **kwargs): blob_content = await (await blob.download_blob(offset=offset, length=length)).readall() # Assert - assert data[offset:offset + length] == blob_content + assert data[offset : offset + length] == blob_content @BlobPreparer() @recorded_by_proxy_async @@ -597,7 +590,7 @@ async def test_put_blob_strict_mode(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - content = b'Hello world' + content = b"Hello world" # Assert for service in self.blob_types: @@ -617,7 +610,7 @@ async def test_put_blob_strict_mode(self, **kwargs): with pytest.raises(ValueError): await blob.upload_blob(temp_file, blob_type=service) with pytest.raises(ValueError): - await blob.upload_blob('To encrypt', blob_type=service) + await blob.upload_blob("To encrypt", blob_type=service) @BlobPreparer() @recorded_by_proxy_async @@ -627,7 +620,7 @@ async def test_get_blob_strict_mode_no_policy(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = await self._create_small_blob(BlobType.BlockBlob) # Act @@ -648,7 +641,7 @@ async def test_get_blob_strict_mode_unencrypted_blob(self, **kwargs): # Act blob.require_encryption = True - blob.key_encryption_key = 
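The strict-mode hunks above assert that `require_encryption` with no key fails fast on the client rather than sending plaintext. A sketch of that failure mode, with placeholders as before:

```python
from azure.storage.blob import BlobServiceClient

bsc = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",  # placeholder account
    credential="<account-key>",                 # placeholder credential
)
bsc.require_encryption = True  # deliberately no key_encryption_key

blob = bsc.get_blob_client("utcontainer", "strict-blob")
try:
    blob.upload_blob(b"Hello world", overwrite=True)
except ValueError:
    pass  # refused locally: encryption is required but no KEK is configured
```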
KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") # Assert with pytest.raises(HttpResponseError): @@ -661,17 +654,17 @@ async def test_invalid_methods_fail_block(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - await blob.stage_block('block1', b'hello world') + await blob.stage_block("block1", b"hello world") assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: - await blob.commit_block_list(['block1']) + await blob.commit_block_list(["block1"]) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION @BlobPreparer() @@ -681,13 +674,13 @@ async def test_invalid_methods_fail_append(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.AppendBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - await blob.append_block(b'hello world') + await blob.append_block(b"hello world") assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: @@ -696,7 +689,7 @@ async def test_invalid_methods_fail_append(self, **kwargs): # All append_from operations funnel into append_from_stream, so testing one is sufficient with pytest.raises(ValueError) as e: - await blob.upload_blob(b'To encrypt', blob_type=BlobType.AppendBlob) + await blob.upload_blob(b"To encrypt", blob_type=BlobType.AppendBlob) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION @BlobPreparer() @@ -706,13 +699,13 @@ async def test_invalid_methods_fail_page(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob_name = self._get_blob_reference(BlobType.PageBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with pytest.raises(ValueError) as e: - await blob.upload_page(b'a' * 512, offset=0, length=512, blob_type=BlobType.PageBlob) + await blob.upload_page(b"a" * 512, offset=0, length=512, blob_type=BlobType.PageBlob) assert str(e.value) == _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION with pytest.raises(ValueError) as e: @@ -727,7 +720,7 @@ async def test_validate_encryption(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.bsc.key_encryption_key = kek blob = await self._create_small_blob(BlobType.BlockBlob) @@ -737,7 +730,7 @@ async def test_validate_encryption(self, **kwargs): content = await blob.download_blob() data = await content.readall() - encryption_data = _dict_to_encryption_data(loads(content.properties.metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(content.properties.metadata["encryptiondata"])) iv = encryption_data.content_encryption_IV content_encryption_key = _validate_and_unwrap_cek(encryption_data, kek, None) cipher 
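The invalid-methods tests above enumerate the sub-blob operations that client-side encryption blocks outright, since a partial update cannot be re-encrypted consistently. A sketch of the caller-visible behavior, assuming the encryption-enabled `bsc` from earlier:

```python
blob = bsc.get_blob_client("utcontainer", "partial-update-blob")

for attempt in (
    lambda: blob.stage_block("block1", b"hello world"),
    lambda: blob.commit_block_list(["block1"]),
):
    try:
        attempt()
    except ValueError:
        pass  # raised before any request is sent; same for append_block
              # on append blobs and upload_page on page blobs
```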
= _generate_AES_CBC_cipher(content_encryption_key, iv) @@ -765,7 +758,7 @@ async def test_create_block_blob_from_star(self, **kwargs): temp_file.write(self.bytes) temp_file.seek(0) await self._create_blob_from_star(BlobType.BlockBlob, "blob3", self.bytes, temp_file) - await self._create_blob_from_star(BlobType.BlockBlob, "blob4", b'To encrypt', 'To encrypt') + await self._create_blob_from_star(BlobType.BlockBlob, "blob4", b"To encrypt", "To encrypt") @BlobPreparer() @recorded_by_proxy_async @@ -787,7 +780,7 @@ async def test_create_page_blob_from_star(self, **kwargs): async def _create_blob_from_star(self, blob_type, blob_name, content, data, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.require_encryption = True await blob.upload_blob(data, blob_type=blob_type, **kwargs) @@ -802,7 +795,7 @@ async def test_get_blob_to_star(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") blob = await self._create_small_blob(BlobType.BlockBlob) # Act @@ -830,9 +823,9 @@ async def test_get_blob_read(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") - data = b'12345' * 205 * 25 # 25625 bytes + data = b"12345" * 205 * 25 # 25625 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference(BlobType.BLOCKBLOB)) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob(max_concurrency=3) @@ -859,11 +852,11 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True - self.bsc.key_encryption_key = KeyWrapper('key1') + self.bsc.key_encryption_key = KeyWrapper("key1") self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference(BlobType.BLOCKBLOB)) await blob.upload_blob(data, overwrite=True) offset, length = 501, 5000 @@ -873,15 +866,15 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = await stream.read(100) # Read in first chunk second = await stream.readall() - assert first == data[offset:offset + 100] - assert second == data[offset + 100:offset + length] + assert first == data[offset : offset + 100] + assert second == data[offset + 100 : offset + length] stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(3000) # Read past first chunk second = await stream.readall() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(3000) # Read past first chunk @@ -889,8 +882,8 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_size = await stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + 3000] - assert second == data[offset 
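The read tests above exercise three access styles on the downloader object. A compact sketch, assuming `blob` already holds encrypted data longer than the requested range:

```python
from io import BytesIO

stream = blob.download_blob(offset=500, length=5000)
first = stream.read(100)   # next 100 decrypted bytes
rest = stream.readall()    # everything remaining in the range

stream = blob.download_blob(offset=500, length=5000)
buf = BytesIO()
n = stream.readinto(buf)   # fills a writable stream, returns bytes written
assert n == len(buf.getvalue())
```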
+ 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] assert read_size == len(second) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2.py index fd78184903c6..e26ba75ceba9 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2.py @@ -28,18 +28,16 @@ from encryption_test_helper import KeyResolver, KeyWrapper, mock_urandom, RSAKeyWrapper from settings.testcase import BlobPreparer -TEST_CONTAINER_PREFIX = 'encryptionv2_container' -TEST_BLOB_PREFIX = 'encryptionv2_blob' +TEST_CONTAINER_PREFIX = "encryptionv2_container" +TEST_BLOB_PREFIX = "encryptionv2_blob" MiB = 1024 * 1024 class TestStorageBlobEncryptionV2(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self, storage_account_name, key): - self.bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=key) - self.container_name = self.get_resource_name('utcontainer') + self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key) + self.container_name = self.get_resource_name("utcontainer") if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -56,8 +54,9 @@ def _get_blob_reference(self): def enable_encryption_v2(self, kek): self.bsc.require_encryption = True - self.bsc.encryption_version = '2.0' + self.bsc.encryption_version = "2.0" self.bsc.key_encryption_key = kek + # -------------------------------------------------------------------------- @BlobPreparer() @@ -66,29 +65,29 @@ def test_v2_blocked_for_page_blob_upload(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) # Act with pytest.raises(ValueError): - blob.upload_blob(b'Test', blob_type=BlobType.PAGEBLOB) + blob.upload_blob(b"Test", blob_type=BlobType.PAGEBLOB) @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_validate_encryption(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
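From here the diff moves to the encryption v2 suite. Enabling v2 mirrors the `enable_encryption_v2` helper above, and page blobs are rejected before any request is made. A sketch reusing the LocalKeyWrapper from the first example:

```python
from azure.storage.blob import BlobServiceClient, BlobType

bsc = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",  # placeholder account
    credential="<account-key>",                 # placeholder credential
)
bsc.require_encryption = True
bsc.encryption_version = "2.0"                  # opt into AES-GCM (v2)
bsc.key_encryption_key = LocalKeyWrapper("key1")  # from the earlier sketch

blob = bsc.get_blob_client("utcontainer", "v2-blob")
blob.upload_blob(b"Hello World Encrypted!", overwrite=True)

try:
    blob.upload_blob(b"Test", blob_type=BlobType.PAGEBLOB)
except ValueError:
    pass  # v2 client-side encryption does not support page blobs
```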
# Act blob.upload_blob(content, overwrite=True) @@ -98,11 +97,11 @@ def test_validate_encryption(self, **kwargs): metadata = blob.get_blob_properties().metadata encrypted_data = blob.download_blob().readall() - encryption_data = _dict_to_encryption_data(loads(metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(metadata["encryptiondata"])) encryption_agent = encryption_data.encryption_agent - assert '2.0' == encryption_agent.protocol - assert 'AES_GCM_256' == encryption_agent.encryption_algorithm + assert "2.0" == encryption_agent.protocol + assert "AES_GCM_256" == encryption_agent.encryption_algorithm encrypted_region_info = encryption_data.encrypted_region_info assert _GCM_NONCE_LENGTH == encrypted_region_info.nonce_length @@ -124,24 +123,25 @@ def test_validate_encryption(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_validate_encryption_chunked_upload(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'a' * 5 * 1024 + content = b"a" * 5 * 1024 # Act blob.upload_blob(content, overwrite=True) @@ -151,11 +151,11 @@ def test_validate_encryption_chunked_upload(self, **kwargs): metadata = blob.get_blob_properties().metadata encrypted_data = blob.download_blob().readall() - encryption_data = _dict_to_encryption_data(loads(metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(metadata["encryptiondata"])) encryption_agent = encryption_data.encryption_agent - assert '2.0' == encryption_agent.protocol - assert 'AES_GCM_256' == encryption_agent.encryption_algorithm + assert "2.0" == encryption_agent.protocol + assert "AES_GCM_256" == encryption_agent.encryption_algorithm encrypted_region_info = encryption_data.encrypted_region_info assert _GCM_NONCE_LENGTH == encrypted_region_info.nonce_length @@ -177,17 +177,17 @@ def test_validate_encryption_chunked_upload(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_kek(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act blob.upload_blob(content, overwrite=True) @@ -205,11 +205,11 @@ def test_encryption_kek_rsa(self, **kwargs): # We can only generate random RSA keys, so this must be run live or # the playback test will fail due to a change in kek values. 
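The validation tests above crack open the envelope the service stores next to the blob. Reading it back is just metadata plus JSON, as sketched here for a v2-encrypted `blob`:

```python
from json import loads

meta = blob.get_blob_properties().metadata
envelope = loads(meta["encryptiondata"])

assert envelope["EncryptionAgent"]["Protocol"] == "2.0"
assert envelope["EncryptionAgent"]["EncryptionAlgorithm"] == "AES_GCM_256"
# The wrapped CEK and the per-region nonce/tag sizes are recorded here too.
```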
self._setup(storage_account_name, storage_account_key) - kek = RSAKeyWrapper('key2') + kek = RSAKeyWrapper("key2") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act blob.upload_blob(content, overwrite=True) @@ -220,20 +220,20 @@ def test_encryption_kek_rsa(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_kek_resolver(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) key_resolver = KeyResolver() key_resolver.put_key(self.bsc.key_encryption_key) self.bsc.key_resolver_function = key_resolver.resolve_key blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act self.bsc.key_encryption_key = None @@ -248,25 +248,25 @@ def test_encryption_kek_resolver(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_with_blob_lease(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" - blob.upload_blob(b'', overwrite=True) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + blob.upload_blob(b"", overwrite=True) + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act blob.upload_blob(content, overwrite=True, lease=lease) with pytest.raises(HttpResponseError): - blob.download_blob(lease='00000000-1111-2222-3333-444444444445') + blob.download_blob(lease="00000000-1111-2222-3333-444444444445") data = blob.download_blob(lease=lease).readall() @@ -275,27 +275,27 @@ def test_encryption_with_blob_lease(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_with_if_match(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
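The kek-resolver test above shows a download succeeding with no key configured at all, only a kid-to-key lookup. A sketch of that wiring, using the LocalKeyWrapper from the first example:

```python
keys = {}

def resolve_key(kid: str):
    # Called by the client with the kid recorded in the blob's envelope.
    return keys[kid]

kek = LocalKeyWrapper("key1")
keys[kek.get_kid()] = kek

bsc.key_encryption_key = None        # force resolver-only lookup on download
bsc.key_resolver_function = resolve_key
data = blob.download_blob().readall()
```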
- resp = blob.upload_blob(b'', overwrite=True) - etag = resp['etag'] + resp = blob.upload_blob(b"", overwrite=True) + etag = resp["etag"] # Act resp = blob.upload_blob(content, overwrite=True, etag=etag, match_condition=MatchConditions.IfNotModified) - etag = resp['etag'] + etag = resp["etag"] with pytest.raises(HttpResponseError): - blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + blob.download_blob(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified) data = blob.download_blob(etag=etag, match_condition=MatchConditions.IfNotModified).readall() @@ -304,19 +304,19 @@ def test_encryption_with_if_match(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_decryption_on_non_encrypted_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Not Encrypted!' + content = b"Hello World Not Encrypted!" blob.upload_blob(content, overwrite=True) # Act - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.require_encryption = True with pytest.raises(HttpResponseError): @@ -330,91 +330,91 @@ def test_decryption_on_non_encrypted_blob(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_v2_v1_downgrade(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Upload blob with encryption V2 blob.upload_blob(content, overwrite=True) # Modify metadata to look like V1 metadata = blob.get_blob_properties().metadata - encryption_data = loads(metadata['encryptiondata']) - encryption_data['EncryptionAgent']['Protocol'] = '1.0' - encryption_data['EncryptionAgent']['EncryptionAlgorithm'] = 'AES_CBC_256' + encryption_data = loads(metadata["encryptiondata"]) + encryption_data["EncryptionAgent"]["Protocol"] = "1.0" + encryption_data["EncryptionAgent"]["EncryptionAlgorithm"] = "AES_CBC_256" iv = base64.b64encode(os.urandom(16)) - encryption_data['ContentEncryptionIV'] = iv.decode('utf-8') - metadata = {'encryptiondata': dumps(encryption_data)} + encryption_data["ContentEncryptionIV"] = iv.decode("utf-8") + metadata = {"encryptiondata": dumps(encryption_data)} # Act / Assert blob.set_blob_metadata(metadata) with pytest.raises(HttpResponseError) as e: blob.download_blob() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." 
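The if-match test above layers optimistic concurrency on top of encryption. The pattern in isolation:

```python
from azure.core import MatchConditions
from azure.core.exceptions import HttpResponseError

content = b"Hello World Encrypted!"

resp = blob.upload_blob(b"", overwrite=True)
etag = resp["etag"]

# Overwrite only if the blob is unchanged since we captured the etag.
resp = blob.upload_blob(
    content, overwrite=True, etag=etag, match_condition=MatchConditions.IfNotModified
)
etag = resp["etag"]

try:
    blob.download_blob(etag="0x111111111111111",
                       match_condition=MatchConditions.IfNotModified)
except HttpResponseError:
    pass  # stale etag: the service rejects the conditional read
```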
in str(e.value) @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_modify_cek(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" blob.upload_blob(content, overwrite=True) # Modify cek to not include the version metadata = blob.get_blob_properties().metadata - encryption_data = loads(metadata['encryptiondata']) - encrypted_key = base64.b64decode(encryption_data['WrappedContentKey']['EncryptedKey']) - cek = kek.unwrap_key(encrypted_key, 'A256KW') + encryption_data = loads(metadata["encryptiondata"]) + encrypted_key = base64.b64decode(encryption_data["WrappedContentKey"]["EncryptedKey"]) + cek = kek.unwrap_key(encrypted_key, "A256KW") encrypted_key = kek.wrap_key(cek[8:]) encrypted_key = base64.b64encode(encrypted_key).decode() - encryption_data['WrappedContentKey']['EncryptedKey'] = encrypted_key - metadata = {'encryptiondata': dumps(encryption_data)} + encryption_data["WrappedContentKey"]["EncryptedKey"] = encrypted_key + metadata = {"encryptiondata": dumps(encryption_data)} # Act / Assert blob.set_blob_metadata(metadata) with pytest.raises(HttpResponseError) as e: blob.download_blob() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." in str(e.value) @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_case_insensitive_metadata_key(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
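The modify-cek test works because v2 wraps the protocol version together with the content key, so re-wrapping a truncated key is detected at unwrap time. A hedged sketch with the cryptography package's AES key wrap; the 8-byte prefix layout is inferred from the test's `cek[8:]` slice, not from a published spec:

```python
import os
from cryptography.hazmat.primitives.keywrap import aes_key_wrap, aes_key_unwrap

kek = os.urandom(32)
cek = os.urandom(32)

# Assumed layout: fixed 8-byte version prefix, then the real content key.
wrapped = aes_key_wrap(kek, b"2.0".ljust(8, b"\x00") + cek)
unwrapped = aes_key_unwrap(kek, wrapped)

assert unwrapped[:3] == b"2.0"  # the client checks the embedded version
assert unwrapped[8:] == cek     # remaining bytes are the content key
```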
# Upload blob with encryption V2 blob.upload_blob(content, overwrite=True) # Change the case of the metadata key metadata = blob.get_blob_properties().metadata - encryption_data = metadata['encryptiondata'] - metadata = {'Encryptiondata': encryption_data} + encryption_data = metadata["encryptiondata"] + metadata = {"Encryptiondata": encryption_data} blob.set_blob_metadata(metadata) # Act @@ -425,17 +425,17 @@ def test_case_insensitive_metadata_key(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_put_blob_empty(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'' + content = b"" # Act blob.upload_blob(content, overwrite=True) @@ -446,24 +446,25 @@ def test_put_blob_empty(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_put_blob_single_region_chunked(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 1024 + content = b"abcde" * 1024 # Act blob.upload_blob(content, overwrite=True) @@ -479,18 +480,19 @@ def test_put_blob_multi_region_chunked_size_equal_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -506,18 +508,19 @@ def test_put_blob_multi_region_chunked_size_equal_region_concurrent(self, **kwar storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True, max_concurrency=3) @@ -533,18 +536,19 @@ def 
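The single- and multi-region hunks above fix the v2 geometry: 4 MiB plaintext regions, each carrying a 12-byte GCM nonce and a 16-byte tag. A back-of-envelope size calculation under those constants; the empty-blob case is an assumption:

```python
MiB = 1024 * 1024
REGION, NONCE, TAG = 4 * MiB, 12, 16  # constants the tests pin down

def encrypted_size(plain_len: int) -> int:
    regions = -(-plain_len // REGION) or 1  # ceil-divide; assume >= 1 region
    return plain_len + regions * (NONCE + TAG)

# 15 MiB of plaintext spans 4 regions, so 4 x 28 bytes of overhead.
assert encrypted_size(15 * MiB) == 15 * MiB + 4 * (NONCE + TAG)
```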
test_put_blob_multi_region_chunked_size_less_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=2 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -560,18 +564,19 @@ def test_put_blob_multi_region_chunked_size_greater_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=6 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -587,29 +592,30 @@ def test_put_blob_other_data_types(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" length = len(content) byte_io = BytesIO(content) def generator(): - yield b'Hello ' - yield b'World ' - yield b'Encrypted!' + yield b"Hello " + yield b"World " + yield b"Encrypted!" def text_generator(): - yield 'Hello ' - yield 'World ' - yield 'Encrypted!' + yield "Hello " + yield "World " + yield "Encrypted!" 
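The other-data-types tests above confirm that `upload_blob` takes more than raw bytes: file-like objects and generators stream through the same encrypted path. A sketch covering the same shapes, assuming the v2-enabled `bsc` from earlier:

```python
from io import BytesIO

blob = bsc.get_blob_client("utcontainer", "many-types-blob")
content = b"Hello World Encrypted!"

def byte_chunks():
    yield b"Hello "
    yield b"World "
    yield b"Encrypted!"

for data in (content, BytesIO(content), byte_chunks()):
    blob.upload_blob(data, overwrite=True)
    assert blob.download_blob().readall() == content
```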
data_list = [byte_io, generator(), text_generator()] @@ -628,29 +634,30 @@ def test_put_blob_other_data_types_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 1030 # 5 KiB + 30 + content = b"abcde" * 1030 # 5 KiB + 30 byte_io = BytesIO(content) def generator(): for i in range(0, len(content), 500): - yield content[i: i + 500] + yield content[i : i + 500] def text_generator(): - s_content = str(content, encoding='utf-8') + s_content = str(content, encoding="utf-8") for i in range(0, len(s_content), 500): - yield s_content[i: i + 500] + yield s_content[i : i + 500] data_list = [byte_io, generator(), text_generator()] @@ -669,18 +676,18 @@ def test_get_blob_range_single_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act blob.upload_blob(content, overwrite=True) data = blob.download_blob(offset=0, length=4 * MiB).readall() # Assert - assert content[:4 * MiB] == data + assert content[: 4 * MiB] == data @pytest.mark.live_test_only @BlobPreparer() @@ -689,11 +696,11 @@ def test_get_blob_range_multiple_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act blob.upload_blob(content, overwrite=True) @@ -709,11 +716,11 @@ def test_get_blob_range_single_region_beginning_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB # Act blob.upload_blob(content, overwrite=True) @@ -729,11 +736,11 @@ def test_get_blob_range_single_region_middle_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB # Act blob.upload_blob(content, overwrite=True) @@ -749,11 +756,11 @@ def test_get_blob_range_single_region_middle_to_end(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") 
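The boundary and cross-region range tests that follow are all consequences of one piece of geometry: which 4 MiB regions a requested span touches. A sketch of that calculation:

```python
MiB = 1024 * 1024
REGION = 4 * MiB

def covering_regions(offset: int, length: int) -> range:
    # Indices of the plaintext regions a ranged read must fetch and decrypt.
    first = offset // REGION
    last = (offset + length - 1) // REGION
    return range(first, last + 1)

# A 2-byte read straddling the first boundary touches regions 0 and 1.
assert list(covering_regions(4 * MiB - 1, 2)) == [0, 1]
```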
self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB length = len(content) # Act @@ -761,7 +768,7 @@ def test_get_blob_range_single_region_middle_to_end(self, **kwargs): data = blob.download_blob(offset=length - 1000000, length=1000000).readall() # Assert - assert content[length - 1000000:] == data + assert content[length - 1000000 :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -770,18 +777,18 @@ def test_get_blob_range_cross_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act blob.upload_blob(content, overwrite=True) - data = blob.download_blob(offset=3*1024*1024, length=2*1024*1024).readall() + data = blob.download_blob(offset=3 * 1024 * 1024, length=2 * 1024 * 1024).readall() # Assert - assert content[3*1024*1024:5*1024*1024] == data + assert content[3 * 1024 * 1024 : 5 * 1024 * 1024] == data @pytest.mark.live_test_only @BlobPreparer() @@ -790,18 +797,18 @@ def test_get_blob_range_inside_second_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act blob.upload_blob(content, overwrite=True) data = blob.download_blob(offset=5 * MiB, length=MiB).readall() # Assert - assert content[5 * MiB:6 * MiB] == data + assert content[5 * MiB : 6 * MiB] == data @pytest.mark.live_test_only @BlobPreparer() @@ -810,18 +817,18 @@ def test_get_blob_range_oversize_length(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act blob.upload_blob(content, overwrite=True) data = blob.download_blob(offset=1 * MiB, length=7 * MiB).readall() # Assert - assert content[1 * MiB:] == data + assert content[1 * MiB :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -830,18 +837,18 @@ def test_get_blob_range_boundary(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act blob.upload_blob(content, overwrite=True) data = blob.download_blob(offset=4 * MiB - 1, length=4 * MiB + 2).readall() # Assert - assert content[4 * MiB - 1:] == data + assert content[4 * MiB - 1 :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -850,18 +857,19 @@ def test_get_blob_chunked_size_equal_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek 
= KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -877,18 +885,19 @@ def test_get_blob_range_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB blob.upload_blob(content, overwrite=True) # Act @@ -896,7 +905,7 @@ def test_get_blob_range_chunked(self, **kwargs): data = blob.download_blob(offset=offset, length=length).readall() # Assert - assert content[offset:offset + length] == data + assert content[offset : offset + length] == data @pytest.mark.live_test_only @BlobPreparer() @@ -905,18 +914,19 @@ def test_get_blob_chunked_size_equal_region_size_concurrent(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 4 * MiB # 20 MiB + content = b"abcde" * 4 * MiB # 20 MiB # Act blob.upload_blob(content, overwrite=True) @@ -932,18 +942,19 @@ def test_get_blob_chunked_size_less_than_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=2 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -959,18 +970,19 @@ def test_get_blob_chunked_size_greater_than_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=6 * MiB, require_encryption=True, - 
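The chunked-get tests above sweep the two download size knobs against the region size. In isolation the configuration looks like this, with placeholders as before:

```python
from azure.storage.blob import BlobServiceClient

MiB = 1024 * 1024
bsc = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",  # placeholder account
    credential="<account-key>",                 # placeholder credential
    max_single_get_size=4 * MiB,  # size of the initial GET
    max_chunk_get_size=4 * MiB,   # size of each follow-up ranged GET
)
blob = bsc.get_blob_client("utcontainer", "big-blob")
data = blob.download_blob(max_concurrency=3).readall()  # parallel chunk fetch
```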
encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -986,18 +998,19 @@ def test_get_blob_using_chunks_iter(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act blob.upload_blob(content, overwrite=True) @@ -1005,7 +1018,7 @@ def test_get_blob_using_chunks_iter(self, **kwargs): total = 0 for chunk in chunks_iter: - assert content[total:total+len(chunk)] == chunk + assert content[total : total + len(chunk)] == chunk total += len(chunk) # Assert @@ -1018,18 +1031,19 @@ def test_get_blob_using_read(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = b'abcde' * 4 * MiB # 20 MiB + data = b"abcde" * 4 * MiB # 20 MiB blob.upload_blob(data, overwrite=True) # Act @@ -1055,18 +1069,19 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = b'abcde' * 4 * MiB # 20 MiB + data = b"abcde" * 4 * MiB # 20 MiB blob.upload_blob(data, overwrite=True) offset, length = 1 * MiB, 5 * MiB @@ -1077,16 +1092,16 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = stream.read(read_size) # Read in first chunk second = stream.readall() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert second == data[offset + read_size : offset + length] read_size = 4 * MiB + 100000 stream = blob.download_blob(offset=offset, length=length) first = stream.read(read_size) # Read past first chunk second = stream.readall() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert 
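The chunks-iter test above streams the download instead of buffering it, which matters once blobs outgrow memory. The same loop, assuming `blob` and its uploaded `content` from the sketches above:

```python
chunks_iter = blob.download_blob().chunks()

total = 0
for chunk in chunks_iter:
    # Each chunk arrives already decrypted and in order.
    assert content[total : total + len(chunk)] == chunk
    total += len(chunk)
assert total == len(content)
```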
second == data[offset + read_size : offset + length] stream = blob.download_blob(offset=offset, length=length) first = stream.read(read_size) # Read past first chunk @@ -1094,8 +1109,8 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_length = stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert second == data[offset + read_size : offset + length] assert read_length == len(second) @pytest.mark.live_test_only @@ -1105,26 +1120,27 @@ def test_get_blob_using_read_chars(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=1024, max_chunk_get_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = '你好世界' * 1024 # 12 KiB - blob.upload_blob(data, overwrite=True, encoding='utf-8') + data = "你好世界" * 1024 # 12 KiB + blob.upload_blob(data, overwrite=True, encoding="utf-8") # Act / Assert - stream = blob.download_blob(max_concurrency=2, encoding='utf-8') + stream = blob.download_blob(max_concurrency=2, encoding="utf-8") assert stream.read() == data - result = '' - stream = blob.download_blob(encoding='utf-8') + result = "" + stream = blob.download_blob(encoding="utf-8") for _ in range(4): chunk = stream.read(chars=300) result += chunk @@ -1141,11 +1157,11 @@ def test_get_blob_large_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = (b'abcde' * 100 * MiB) + b'abc' # 500 MiB + 3 + content = (b"abcde" * 100 * MiB) + b"abc" # 500 MiB + 3 # Act blob.upload_blob(content, overwrite=True, max_concurrency=5) @@ -1156,20 +1172,20 @@ def test_get_blob_large_blob(self, **kwargs): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_user_agent(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) def assert_user_agent(request): - assert request.http_request.headers['User-Agent'].startswith('azstorage-clientsideencryption/2.0 ') + assert request.http_request.headers["User-Agent"].startswith("azstorage-clientsideencryption/2.0 ") blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
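The read-chars test above downloads text rather than bytes: pass `encoding` to `download_blob` and the stream hands back decoded characters, even when a multi-byte character spans a chunk boundary. A sketch:

```python
text = "你好世界" * 1024  # 12 KiB in UTF-8 (3 bytes per character)
blob.upload_blob(text, overwrite=True, encoding="utf-8")

stream = blob.download_blob(encoding="utf-8")
result = ""
while True:
    piece = stream.read(chars=300)  # character-counted, not byte-counted
    if not piece:
        break
    result += piece
assert result == text
```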
# Act blob.upload_blob(content, overwrite=True, raw_request_hook=assert_user_agent) @@ -1177,21 +1193,21 @@ def assert_user_agent(request): @BlobPreparer() @recorded_by_proxy - @mock.patch('os.urandom', mock_urandom) + @mock.patch("os.urandom", mock_urandom) def test_encryption_user_agent_app_id(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) - app_id = 'TestAppId' - content = b'Hello World Encrypted!' + app_id = "TestAppId" + content = b"Hello World Encrypted!" def assert_user_agent(request): - start = f'{app_id} azstorage-clientsideencryption/2.0 ' - assert request.http_request.headers['User-Agent'].startswith(start) + start = f"{app_id} azstorage-clientsideencryption/2.0 " + assert request.http_request.headers["User-Agent"].startswith(start) # Test method level keyword blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) @@ -1204,9 +1220,10 @@ def assert_user_agent(request): self.bsc.url, credential=storage_account_key, require_encryption=True, - encryption_version='2.0', + encryption_version="2.0", key_encryption_key=kek, - user_agent=app_id) + user_agent=app_id, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2_async.py index 39697234d3fa..aac587e8182b 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_v2_async.py @@ -30,18 +30,16 @@ from test_helpers_async import AsyncStream from settings.testcase import BlobPreparer -TEST_CONTAINER_PREFIX = 'encryptionv2_container' -TEST_BLOB_PREFIX = 'encryptionv2_blob' +TEST_CONTAINER_PREFIX = "encryptionv2_container" +TEST_BLOB_PREFIX = "encryptionv2_blob" MiB = 1024 * 1024 class TestStorageBlobEncryptionV2Async(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- async def _setup(self, storage_account_name, key): - self.bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=key) - self.container_name = self.get_resource_name('utcontainer') + self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key) + self.container_name = self.get_resource_name("utcontainer") if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -58,8 +56,9 @@ def _get_blob_reference(self): def enable_encryption_v2(self, kek): self.bsc.require_encryption = True - self.bsc.encryption_version = '2.0' + self.bsc.encryption_version = "2.0" self.bsc.key_encryption_key = kek + # -------------------------------------------------------------------------- @BlobPreparer() @@ -68,15 +67,15 @@ async def test_v2_blocked_for_page_blob_upload(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) # Act with 
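The user-agent tests above verify the client-side-encryption marker with a `raw_request_hook`, which runs against every outgoing request. The hook in isolation, assuming a v2-enabled `blob`:

```python
def assert_user_agent(request):
    # The v2 client prefixes its telemetry string with this marker.
    assert request.http_request.headers["User-Agent"].startswith(
        "azstorage-clientsideencryption/2.0 "
    )

blob.upload_blob(b"Hello World Encrypted!", overwrite=True,
                 raw_request_hook=assert_user_agent)
```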
pytest.raises(ValueError): - await blob.upload_blob(b'Test', blob_type=BlobType.PAGEBLOB) + await blob.upload_blob(b"Test", blob_type=BlobType.PAGEBLOB) @BlobPreparer() @recorded_by_proxy_async @@ -85,14 +84,14 @@ async def test_validate_encryption(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) blob.require_encryption = False @@ -100,11 +99,11 @@ async def test_validate_encryption(self, **kwargs): metadata = (await blob.get_blob_properties()).metadata encrypted_data = await (await blob.download_blob()).readall() - encryption_data = _dict_to_encryption_data(loads(metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(metadata["encryptiondata"])) encryption_agent = encryption_data.encryption_agent - assert '2.0' == encryption_agent.protocol - assert 'AES_GCM_256' == encryption_agent.encryption_algorithm + assert "2.0" == encryption_agent.protocol + assert "AES_GCM_256" == encryption_agent.encryption_algorithm encrypted_region_info = encryption_data.encrypted_region_info assert _GCM_NONCE_LENGTH == encrypted_region_info.nonce_length @@ -127,26 +126,27 @@ async def test_validate_encryption(self, **kwargs): @BlobPreparer() @recorded_by_proxy_async async def test_validate_encryption_chunked_upload(self, **kwargs): - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'a' * 5 * 1024 + content = b"a" * 5 * 1024 # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) blob.require_encryption = False @@ -154,11 +154,11 @@ async def test_validate_encryption_chunked_upload(self, **kwargs): metadata = (await blob.get_blob_properties()).metadata encrypted_data = await (await blob.download_blob()).readall() - encryption_data = _dict_to_encryption_data(loads(metadata['encryptiondata'])) + encryption_data = _dict_to_encryption_data(loads(metadata["encryptiondata"])) encryption_agent = encryption_data.encryption_agent - assert '2.0' == encryption_agent.protocol - assert 'AES_GCM_256' == encryption_agent.encryption_algorithm + assert "2.0" == encryption_agent.protocol + assert "AES_GCM_256" == encryption_agent.encryption_algorithm encrypted_region_info = encryption_data.encrypted_region_info assert _GCM_NONCE_LENGTH == encrypted_region_info.nonce_length @@ -185,14 +185,14 @@ async def test_encryption_kek(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await 
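The async v2 suite patches `os.urandom` so recorded runs are deterministic. A hedged sketch with a hypothetical stand-in for the suite's `mock_urandom` helper; constant randomness is only tolerable for test playback, never for real encryption:

```python
from unittest import mock

def fake_urandom(n: int) -> bytes:
    # Hypothetical stand-in: constant bytes make nonces and CEKs repeatable
    # across recordings. Catastrophically unsafe outside of test playback.
    return b"\x01" * n

with mock.patch("os.urandom", fake_urandom):
    blob.upload_blob(b"Hello World Encrypted!", overwrite=True)
```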
self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob()).readall() @@ -208,11 +208,11 @@ async def test_encryption_kek_rsa(self, **kwargs): # We can only generate random RSA keys, so this must be run live or # the playback test will fail due to a change in kek values. await self._setup(storage_account_name, storage_account_key) - kek = RSAKeyWrapper('key2') + kek = RSAKeyWrapper("key2") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act await blob.upload_blob(content, overwrite=True) @@ -228,18 +228,18 @@ async def test_encryption_kek_resolver(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) key_resolver = KeyResolver() key_resolver.put_key(self.bsc.key_encryption_key) self.bsc.key_resolver_function = key_resolver.resolve_key blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act self.bsc.key_encryption_key = None - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) # Set kek to None to test only resolver for download @@ -256,21 +256,21 @@ async def test_encryption_with_blob_lease(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" - with mock.patch('os.urandom', mock_urandom): - await blob.upload_blob(b'', overwrite=True) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + with mock.patch("os.urandom", mock_urandom): + await blob.upload_blob(b"", overwrite=True) + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act await blob.upload_blob(content, overwrite=True, lease=lease) with pytest.raises(HttpResponseError): - await blob.download_blob(lease='00000000-1111-2222-3333-444444444445') + await blob.download_blob(lease="00000000-1111-2222-3333-444444444445") data = await (await blob.download_blob(lease=lease)).readall() @@ -284,22 +284,24 @@ async def test_encryption_with_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
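The remaining hunks are the aio mirror of the sync suite. The same flow in async form, reusing the LocalKeyWrapper sketch; note the client comes from `azure.storage.blob.aio` and every service call is awaited:

```python
import asyncio
from azure.storage.blob.aio import BlobServiceClient

async def main():
    bsc = BlobServiceClient(
        "https://myaccount.blob.core.windows.net",  # placeholder account
        credential="<account-key>",                 # placeholder credential
    )
    bsc.require_encryption = True
    bsc.encryption_version = "2.0"
    bsc.key_encryption_key = LocalKeyWrapper("key1")  # earlier sketch

    async with bsc:
        blob = bsc.get_blob_client("utcontainer", "v2-async-blob")
        await blob.upload_blob(b"Hello World Encrypted!", overwrite=True)
        data = await (await blob.download_blob()).readall()
        assert data == b"Hello World Encrypted!"

asyncio.run(main())
```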
- with mock.patch('os.urandom', mock_urandom): - resp = await blob.upload_blob(b'', overwrite=True) - etag = resp['etag'] + with mock.patch("os.urandom", mock_urandom): + resp = await blob.upload_blob(b"", overwrite=True) + etag = resp["etag"] # Act - resp = await blob.upload_blob(content, overwrite=True, etag=etag, match_condition=MatchConditions.IfNotModified) - etag = resp['etag'] + resp = await blob.upload_blob( + content, overwrite=True, etag=etag, match_condition=MatchConditions.IfNotModified + ) + etag = resp["etag"] with pytest.raises(HttpResponseError): - await blob.download_blob(etag='0x111111111111111', match_condition=MatchConditions.IfNotModified) + await blob.download_blob(etag="0x111111111111111", match_condition=MatchConditions.IfNotModified) data = await (await blob.download_blob(etag=etag, match_condition=MatchConditions.IfNotModified)).readall() @@ -314,12 +316,12 @@ async def test_decryption_on_non_encrypted_blob(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Not Encrypted!' + content = b"Hello World Not Encrypted!" await blob.upload_blob(content, overwrite=True) # Act - blob.key_encryption_key = KeyWrapper('key1') + blob.key_encryption_key = KeyWrapper("key1") blob.require_encryption = True with pytest.raises(HttpResponseError): @@ -338,31 +340,31 @@ async def test_encryption_v2_v1_downgrade(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Upload blob with encryption V2 - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) # Modify metadata to look like V1 metadata = (await blob.get_blob_properties()).metadata - encryption_data = loads(metadata['encryptiondata']) - encryption_data['EncryptionAgent']['Protocol'] = '1.0' - encryption_data['EncryptionAgent']['EncryptionAlgorithm'] = 'AES_CBC_256' + encryption_data = loads(metadata["encryptiondata"]) + encryption_data["EncryptionAgent"]["Protocol"] = "1.0" + encryption_data["EncryptionAgent"]["EncryptionAlgorithm"] = "AES_CBC_256" iv = base64.b64encode(os.urandom(16)) - encryption_data['ContentEncryptionIV'] = iv.decode('utf-8') - metadata = {'encryptiondata': dumps(encryption_data)} + encryption_data["ContentEncryptionIV"] = iv.decode("utf-8") + metadata = {"encryptiondata": dumps(encryption_data)} # Act / Assert await blob.set_blob_metadata(metadata) with pytest.raises(HttpResponseError) as e: await blob.download_blob() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." in str(e.value) @BlobPreparer() @recorded_by_proxy_async @@ -371,31 +373,31 @@ async def test_encryption_modify_cek(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" 
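For context: the v2-to-v1 downgrade test above and the CEK-tampering test just below both work by editing the JSON the SDK stores under the blob's "encryptiondata" metadata key. Restricted to the fields these tests actually touch, a v2 payload looks roughly like the sketch below (values are placeholders and the real payload carries additional fields not shown here).

# Hypothetical, trimmed view of loads(metadata["encryptiondata"]) after a v2 upload.
encryption_data = {
    "EncryptionAgent": {
        "Protocol": "2.0",                     # the downgrade test rewrites this to "1.0"
        "EncryptionAlgorithm": "AES_GCM_256",  # ...and this to "AES_CBC_256"
    },
    "WrappedContentKey": {
        # base64 of the A256KW-wrapped content-encryption key; the test below
        # unwraps it, drops the 8-byte version prefix (cek[8:]), and re-wraps it.
        "EncryptedKey": "<base64-wrapped CEK>",
    },
    # v1 payloads also carry "ContentEncryptionIV", which the downgrade test
    # injects. Either tampering makes download_blob() raise HttpResponseError
    # containing "Decryption failed."
}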
- with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) # Modify cek to not include the version metadata = (await blob.get_blob_properties()).metadata - encryption_data = loads(metadata['encryptiondata']) - encrypted_key = base64.b64decode(encryption_data['WrappedContentKey']['EncryptedKey']) - cek = kek.unwrap_key(encrypted_key, 'A256KW') + encryption_data = loads(metadata["encryptiondata"]) + encrypted_key = base64.b64decode(encryption_data["WrappedContentKey"]["EncryptedKey"]) + cek = kek.unwrap_key(encrypted_key, "A256KW") encrypted_key = kek.wrap_key(cek[8:]) encrypted_key = base64.b64encode(encrypted_key).decode() - encryption_data['WrappedContentKey']['EncryptedKey'] = encrypted_key - metadata = {'encryptiondata': dumps(encryption_data)} + encryption_data["WrappedContentKey"]["EncryptedKey"] = encrypted_key + metadata = {"encryptiondata": dumps(encryption_data)} # Act / Assert await blob.set_blob_metadata(metadata) with pytest.raises(HttpResponseError) as e: await blob.download_blob() - assert 'Decryption failed.' in str(e.value) + assert "Decryption failed." in str(e.value) @BlobPreparer() @recorded_by_proxy_async @@ -404,20 +406,20 @@ async def test_case_insensitive_metadata_key(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Upload blob with encryption V2 - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) # Change the case of the metadata key metadata = (await blob.get_blob_properties()).metadata - encryption_data = metadata['encryptiondata'] - metadata = {'Encryptiondata': encryption_data} + encryption_data = metadata["encryptiondata"] + metadata = {"Encryptiondata": encryption_data} await blob.set_blob_metadata(metadata) # Act @@ -433,14 +435,14 @@ async def test_put_blob_empty(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'' + content = b"" # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob()).readall() @@ -454,21 +456,22 @@ async def test_put_blob_single_region_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 1024 + content = b"abcde" * 1024 # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await 
blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob()).readall() @@ -482,18 +485,19 @@ async def test_put_blob_multi_region_chunked_size_equal_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -509,18 +513,19 @@ async def test_put_blob_multi_region_chunked_size_equal_region_concurrent(self, storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True, max_concurrency=3) @@ -536,18 +541,19 @@ async def test_put_blob_multi_region_chunked_size_less_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=2 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -563,18 +569,19 @@ async def test_put_blob_multi_region_chunked_size_greater_region(self, **kwargs) storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_put_size=1024, max_block_size=6 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -590,35 +597,36 @@ async def test_put_blob_other_data_types(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), 
credential=storage_account_key, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" length = len(content) byte_io = BytesIO(content) async_stream = AsyncStream(content) def generator(): - yield b'Hello ' - yield b'World ' - yield b'Encrypted!' + yield b"Hello " + yield b"World " + yield b"Encrypted!" def text_generator(): - yield 'Hello ' - yield 'World ' - yield 'Encrypted!' + yield "Hello " + yield "World " + yield "Encrypted!" async def async_generator(): - yield b'Hello ' - yield b'World ' - yield b'Encrypted!' + yield b"Hello " + yield b"World " + yield b"Encrypted!" data_list = [byte_io, generator(), text_generator(), async_generator(), async_stream] @@ -637,32 +645,33 @@ async def test_put_blob_other_data_types_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 1030 # 5 KiB + 30 + content = b"abcde" * 1030 # 5 KiB + 30 byte_io = BytesIO(content) async_stream = AsyncStream(content) def generator(): for i in range(0, len(content), 500): - yield content[i: i + 500] + yield content[i : i + 500] def text_generator(): - s_content = str(content, encoding='utf-8') + s_content = str(content, encoding="utf-8") for i in range(0, len(s_content), 500): - yield s_content[i: i + 500] + yield s_content[i : i + 500] async def async_generator(): for i in range(0, len(content), 500): - yield content[i: i + 500] + yield content[i : i + 500] data_list = [byte_io, generator(), text_generator(), async_generator(), async_stream] @@ -681,18 +690,18 @@ async def test_get_blob_range_single_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob(offset=0, length=4 * MiB)).readall() # Assert - assert content[:4 * MiB] == data + assert content[: 4 * MiB] == data @pytest.mark.live_test_only @BlobPreparer() @@ -701,11 +710,11 @@ async def test_get_blob_range_multiple_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -721,11 +730,11 @@ async def test_get_blob_range_single_region_beginning_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, 
storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -741,11 +750,11 @@ async def test_get_blob_range_single_region_middle_to_middle(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -761,11 +770,11 @@ async def test_get_blob_range_single_region_middle_to_end(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * MiB # 4 MiB + content = b"abcd" * MiB # 4 MiB length = len(content) # Act @@ -773,7 +782,7 @@ async def test_get_blob_range_single_region_middle_to_end(self, **kwargs): data = await (await blob.download_blob(offset=length - 1000000, length=1000000)).readall() # Assert - assert content[length - 1000000:] == data + assert content[length - 1000000 :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -782,18 +791,18 @@ async def test_get_blob_range_cross_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act await blob.upload_blob(content, overwrite=True) - data = await (await blob.download_blob(offset=3*1024*1024, length=2*1024*1024)).readall() + data = await (await blob.download_blob(offset=3 * 1024 * 1024, length=2 * 1024 * 1024)).readall() # Assert - assert content[3*1024*1024:5*1024*1024] == data + assert content[3 * 1024 * 1024 : 5 * 1024 * 1024] == data @pytest.mark.live_test_only @BlobPreparer() @@ -802,18 +811,18 @@ async def test_get_blob_range_inside_second_region(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob(offset=5 * MiB, length=MiB)).readall() # Assert - assert content[5 * MiB:6 * MiB] == data + assert content[5 * MiB : 6 * MiB] == data @pytest.mark.live_test_only @BlobPreparer() @@ -822,18 +831,18 @@ async def test_get_blob_range_oversize_length(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - 
content = b'abcdef' * MiB # 6 MiB + content = b"abcdef" * MiB # 6 MiB # Act await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob(offset=1 * MiB, length=7 * MiB)).readall() # Assert - assert content[1 * MiB:] == data + assert content[1 * MiB :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -842,18 +851,18 @@ async def test_get_blob_range_boundary(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcd' * 2 * MiB # 8 MiB + content = b"abcd" * 2 * MiB # 8 MiB # Act await blob.upload_blob(content, overwrite=True) data = await (await blob.download_blob(offset=4 * MiB - 1, length=4 * MiB + 2)).readall() # Assert - assert content[4 * MiB - 1:] == data + assert content[4 * MiB - 1 :] == data @pytest.mark.live_test_only @BlobPreparer() @@ -862,18 +871,19 @@ async def test_get_blob_chunked_size_equal_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -889,18 +899,19 @@ async def test_get_blob_range_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB await blob.upload_blob(content, overwrite=True) # Act @@ -908,7 +919,7 @@ async def test_get_blob_range_chunked(self, **kwargs): data = await (await blob.download_blob(offset=offset, length=length)).readall() # Assert - assert content[offset:offset + length] == data + assert content[offset : offset + length] == data @pytest.mark.live_test_only @BlobPreparer() @@ -917,18 +928,19 @@ async def test_get_blob_chunked_size_equal_region_size_concurrent(self, **kwargs storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 
4 * MiB # 20 MiB + content = b"abcde" * 4 * MiB # 20 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -944,18 +956,19 @@ async def test_get_blob_chunked_size_less_than_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=2 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -971,18 +984,19 @@ async def test_get_blob_chunked_size_greater_than_region_size(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=6 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -998,18 +1012,19 @@ async def test_get_blob_using_chunks_iter(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'abcde' * 3 * MiB # 15 MiB + content = b"abcde" * 3 * MiB # 15 MiB # Act await blob.upload_blob(content, overwrite=True) @@ -1017,7 +1032,7 @@ async def test_get_blob_using_chunks_iter(self, **kwargs): total = 0 async for chunk in chunks_iter: - assert content[total:total+len(chunk)] == chunk + assert content[total : total + len(chunk)] == chunk total += len(chunk) # Assert @@ -1030,18 +1045,19 @@ async def test_get_blob_using_read(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = b'abcde' * 4 * MiB # 20 MiB + data = b"abcde" * 4 * MiB # 20 MiB await blob.upload_blob(data, overwrite=True) # Act @@ -1067,18 +1083,19 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): 
storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=4 * MiB, max_chunk_get_size=4 * MiB, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = b'abcde' * 4 * MiB # 20 MiB + data = b"abcde" * 4 * MiB # 20 MiB await blob.upload_blob(data, overwrite=True) offset, length = 1 * MiB, 5 * MiB @@ -1089,16 +1106,16 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = await stream.read(read_size) # Read in first chunk second = await stream.readall() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert second == data[offset + read_size : offset + length] read_size = 4 * MiB + 100000 stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(read_size) # Read past first chunk second = await stream.readall() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert second == data[offset + read_size : offset + length] stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(read_size) # Read past first chunk @@ -1106,8 +1123,8 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_length = await stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + read_size] - assert second == data[offset + read_size:offset + length] + assert first == data[offset : offset + read_size] + assert second == data[offset + read_size : offset + length] assert read_length == len(second) @pytest.mark.live_test_only @@ -1117,26 +1134,27 @@ async def test_get_blob_using_read_chars(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_single_get_size=1024, max_chunk_get_size=1024, require_encryption=True, - encryption_version='2.0', - key_encryption_key=kek) + encryption_version="2.0", + key_encryption_key=kek, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - data = '你好世界' * 1024 # 12 KiB - await blob.upload_blob(data, overwrite=True, encoding='utf-8') + data = "你好世界" * 1024 # 12 KiB + await blob.upload_blob(data, overwrite=True, encoding="utf-8") # Act / Assert - stream = await blob.download_blob(max_concurrency=2, encoding='utf-8') + stream = await blob.download_blob(max_concurrency=2, encoding="utf-8") assert await stream.read() == data - result = '' - stream = await blob.download_blob(encoding='utf-8') + result = "" + stream = await blob.download_blob(encoding="utf-8") for _ in range(4): chunk = await stream.read(chars=300) result += chunk @@ -1153,11 +1171,11 @@ async def test_get_blob_large_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = 
KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = (b'abcde' * 100 * MiB) + b'abc' # 500 MiB + 3 + content = (b"abcde" * 100 * MiB) + b"abc" # 500 MiB + 3 # Act await blob.upload_blob(content, overwrite=True, max_concurrency=5) @@ -1173,17 +1191,17 @@ async def test_encryption_user_agent(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) def assert_user_agent(request): - assert request.http_request.headers['User-Agent'].startswith('azstorage-clientsideencryption/2.0 ') + assert request.http_request.headers["User-Agent"].startswith("azstorage-clientsideencryption/2.0 ") blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - content = b'Hello World Encrypted!' + content = b"Hello World Encrypted!" # Act - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True, raw_request_hook=assert_user_agent) await (await blob.download_blob(raw_request_hook=assert_user_agent)).readall() @@ -1194,20 +1212,20 @@ async def test_encryption_user_agent_app_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - kek = KeyWrapper('key1') + kek = KeyWrapper("key1") self.enable_encryption_v2(kek) - app_id = 'TestAppId' - content = b'Hello World Encrypted!' + app_id = "TestAppId" + content = b"Hello World Encrypted!" def assert_user_agent(request): - start = f'{app_id} azstorage-clientsideencryption/2.0 ' - assert request.http_request.headers['User-Agent'].startswith(start) + start = f"{app_id} azstorage-clientsideencryption/2.0 " + assert request.http_request.headers["User-Agent"].startswith(start) # Test method level keyword blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True, raw_request_hook=assert_user_agent, user_agent=app_id) await (await blob.download_blob(raw_request_hook=assert_user_agent, user_agent=app_id)).readall() @@ -1216,12 +1234,13 @@ def assert_user_agent(request): self.bsc.url, credential=storage_account_key, require_encryption=True, - encryption_version='2.0', + encryption_version="2.0", key_encryption_key=kek, - user_agent=app_id) + user_agent=app_id, + ) blob = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - with mock.patch('os.urandom', mock_urandom): + with mock.patch("os.urandom", mock_urandom): await blob.upload_blob(content, overwrite=True, raw_request_hook=assert_user_agent) await (await blob.download_blob(raw_request_hook=assert_user_agent)).readall() diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_retry.py b/sdk/storage/azure-storage-blob/tests/test_blob_retry.py index 75cacc6291cf..91e67b1b544c 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_retry.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_retry.py @@ -22,7 +22,7 @@ class TestStorageBlobRetry(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = 
self.get_resource_name("utcontainer") if self.is_live: try: bsc.create_container(self.container_name) @@ -38,13 +38,11 @@ def test_retry_put_block_with_seekable_stream(self, **kwargs): # Arrange retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - retry_policy=retry + self.account_url(storage_account_name, "blob"), credential=storage_account_key, retry_policy=retry ) self._setup(bsc) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = BytesIO(data) @@ -57,13 +55,13 @@ def test_retry_put_block_with_seekable_stream(self, **kwargs): # Assert _, uncommitted_blocks = blob.get_block_list( - block_list_type="uncommitted", - raw_response_hook=responder.override_first_status) + block_list_type="uncommitted", raw_response_hook=responder.override_first_status + ) assert len(uncommitted_blocks) == 1 assert uncommitted_blocks[0].size == PUT_BLOCK_SIZE # Commit block and verify content - blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status) + blob.commit_block_list(["1"], raw_response_hook=responder.override_first_status) # Assert content = blob.download_blob().readall() @@ -78,13 +76,11 @@ def test_retry_put_block_with_non_seekable_stream(self, **kwargs): # Arrange retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - retry_policy=retry + self.account_url(storage_account_name, "blob"), credential=storage_account_key, retry_policy=retry ) self._setup(bsc) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = NonSeekableStream(BytesIO(data)) @@ -98,13 +94,13 @@ def test_retry_put_block_with_non_seekable_stream(self, **kwargs): # Assert _, uncommitted_blocks = blob.get_block_list( - block_list_type="uncommitted", - raw_response_hook=responder.override_first_status) + block_list_type="uncommitted", raw_response_hook=responder.override_first_status + ) assert len(uncommitted_blocks) == 1 assert uncommitted_blocks[0].size == PUT_BLOCK_SIZE # Commit block and verify content - blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status) + blob.commit_block_list(["1"], raw_response_hook=responder.override_first_status) # Assert content = blob.download_blob().readall() diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_retry_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_retry_async.py index 64020c92ff99..1a9587ed1e04 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_retry_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_retry_async.py @@ -26,7 +26,7 @@ def setUp(self): self.retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) async def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: await bsc.create_container(self.container_name) @@ -43,13 +43,11 @@ async def test_retry_put_block_with_seekable_stream(self, **kwargs): # Arrange retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - retry_policy=retry + 
self.account_url(storage_account_name, "blob"), credential=storage_account_key, retry_policy=retry ) await self._setup(bsc) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = BytesIO(data) @@ -62,13 +60,13 @@ async def test_retry_put_block_with_seekable_stream(self, **kwargs): # Assert _, uncommitted_blocks = await blob.get_block_list( - block_list_type="uncommitted", - raw_response_hook=responder.override_first_status) + block_list_type="uncommitted", raw_response_hook=responder.override_first_status + ) assert len(uncommitted_blocks) == 1 assert uncommitted_blocks[0].size == PUT_BLOCK_SIZE # Commit block and verify content - await blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status) + await blob.commit_block_list(["1"], raw_response_hook=responder.override_first_status) # Assert content = await (await blob.download_blob()).readall() @@ -83,13 +81,11 @@ async def test_retry_put_block_with_non_seekable_stream(self, **kwargs): # Arrange retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - retry_policy=retry + self.account_url(storage_account_name, "blob"), credential=storage_account_key, retry_policy=retry ) await self._setup(bsc) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = NonSeekableStream(BytesIO(data)) @@ -103,13 +99,13 @@ async def test_retry_put_block_with_non_seekable_stream(self, **kwargs): # Assert _, uncommitted_blocks = await blob.get_block_list( - block_list_type="uncommitted", - raw_response_hook=responder.override_first_status) + block_list_type="uncommitted", raw_response_hook=responder.override_first_status + ) assert len(uncommitted_blocks) == 1 assert uncommitted_blocks[0].size == PUT_BLOCK_SIZE # Commit block and verify content - await blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status) + await blob.commit_block_list(["1"], raw_response_hook=responder.override_first_status) # Assert content = await (await blob.download_blob()).readall() diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_service_properties.py b/sdk/storage/azure-storage-blob/tests/test_blob_service_properties.py index 508ee9a0c1d0..fad188a058b4 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_service_properties.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_service_properties.py @@ -16,7 +16,7 @@ Metrics, ResourceTypes, RetentionPolicy, - StaticWebsite + StaticWebsite, ) from devtools_testutils import recorded_by_proxy @@ -32,10 +32,10 @@ class TestServiceProperties(StorageRecordedTestCase): def _assert_properties_default(self, prop): assert prop is not None - self._assert_logging_equal(prop['analytics_logging'], BlobAnalyticsLogging()) - self._assert_metrics_equal(prop['hour_metrics'], Metrics()) - self._assert_metrics_equal(prop['minute_metrics'], Metrics()) - self._assert_cors_equal(prop['cors'], list()) + self._assert_logging_equal(prop["analytics_logging"], BlobAnalyticsLogging()) + self._assert_metrics_equal(prop["hour_metrics"], Metrics()) + self._assert_metrics_equal(prop["minute_metrics"], Metrics()) + self._assert_cors_equal(prop["cors"], list()) def _assert_logging_equal(self, log1, log2): if log1 is None or log2 is None: @@ -119,14 +119,14 @@ def 
test_blob_service_properties(self, **kwargs): hour_metrics=Metrics(), minute_metrics=Metrics(), cors=list(), - target_version='2014-02-14' + target_version="2014-02-14", ) # Assert assert resp is None props = bsc.get_service_properties() self._assert_properties_default(props) - assert '2014-02-14' == props['target_version'] + assert "2014-02-14" == props["target_version"] # --Test cases per feature --------------------------------------- @BlobPreparer() @@ -148,11 +148,11 @@ def test_set_default_service_version(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) # Act - bsc.set_service_properties(target_version='2014-02-14') + bsc.set_service_properties(target_version="2014-02-14") # Assert received_props = bsc.get_service_properties() - assert received_props['target_version'] == '2014-02-14' + assert received_props["target_version"] == "2014-02-14" @BlobPreparer() @recorded_by_proxy @@ -168,7 +168,7 @@ def test_set_delete_retention_policy(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) @BlobPreparer() @recorded_by_proxy @@ -182,7 +182,7 @@ def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) # Should work with maximum settings delete_retention_policy = RetentionPolicy(enabled=True, days=365) @@ -190,7 +190,7 @@ def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) # Should not work with 0 days delete_retention_policy = RetentionPolicy(enabled=True, days=0) @@ -200,7 +200,9 @@ def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + received_props["delete_retention_policy"], delete_retention_policy + ) # Should not work with 366 days delete_retention_policy = RetentionPolicy(enabled=True, days=366) @@ -210,7 +212,9 @@ def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + received_props["delete_retention_policy"], delete_retention_policy + ) @BlobPreparer() @recorded_by_proxy @@ -226,7 +230,7 @@ def test_set_disabled_delete_retention_policy(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) 
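For context: this test and the edge cases just below pin the delete-retention contract to 1 <= days <= 365 for an enabled policy. A minimal usage sketch, with placeholder account URL and credential:

from azure.storage.blob import BlobServiceClient, RetentionPolicy

bsc = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")

# Accepted: an enabled policy with days in [1, 365].
bsc.set_service_properties(delete_retention_policy=RetentionPolicy(enabled=True, days=7))

# Rejected: days=0 and days=366 fail server-side with HttpResponseError, and
# (per test_retention_no_days further down) RetentionPolicy(True, None) raises
# ValueError before any request is made.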
@BlobPreparer() @recorded_by_proxy @@ -236,16 +240,15 @@ def test_set_static_website_properties(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) static_website = StaticWebsite( - enabled=True, - index_document="index.html", - error_document404_path="errors/error/404error.html") + enabled=True, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act bsc.set_service_properties(static_website=static_website) # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy @@ -255,16 +258,15 @@ def test_set_static_website_properties_with_default_index_document_path(self, ** bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) static_website = StaticWebsite( - enabled=True, - error_document404_path="errors/error/404error.html", - default_index_document_path="index.html") + enabled=True, error_document404_path="errors/error/404error.html", default_index_document_path="index.html" + ) # Act bsc.set_service_properties(static_website=static_website) # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy @@ -282,7 +284,7 @@ def test_set_static_website_properties_missing_field(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) # Case2: Arrange index document missing static_website = StaticWebsite(enabled=True, error_document404_path="errors/error/404error.html") @@ -292,7 +294,7 @@ def test_set_static_website_properties_missing_field(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) # Case3: Arrange error document missing static_website = StaticWebsite(enabled=True, index_document="index.html") @@ -302,7 +304,7 @@ def test_set_static_website_properties_missing_field(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy @@ -311,15 +313,16 @@ def test_disabled_static_website_properties(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - static_website = StaticWebsite(enabled=False, index_document="index.html", - error_document404_path="errors/error/404error.html") + static_website = StaticWebsite( + enabled=False, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act bsc.set_service_properties(static_website=static_website) # Assert received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], StaticWebsite(enabled=False)) + 
self._assert_static_website_equal(received_props["static_website"], StaticWebsite(enabled=False)) @BlobPreparer() @recorded_by_proxy @@ -328,10 +331,10 @@ def test_set_static_website_props_dont_impact_other_props(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - cors_rule1 = CorsRule(['www.xyz.com'], ['GET']) + cors_rule1 = CorsRule(["www.xyz.com"], ["GET"]) - allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"] - allowed_methods = ['GET', 'PUT'] + allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] + allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] @@ -340,7 +343,8 @@ def test_set_static_website_props_dont_impact_other_props(self, **kwargs): allowed_methods, max_age_in_seconds=max_age_in_seconds, exposed_headers=exposed_headers, - allowed_headers=allowed_headers) + allowed_headers=allowed_headers, + ) cors = [cors_rule1, cors_rule2] @@ -349,19 +353,20 @@ def test_set_static_website_props_dont_impact_other_props(self, **kwargs): # Assert cors is updated received_props = bsc.get_service_properties() - self._assert_cors_equal(received_props['cors'], cors) + self._assert_cors_equal(received_props["cors"], cors) # Arrange to set static website properties - static_website = StaticWebsite(enabled=True, index_document="index.html", - error_document404_path="errors/error/404error.html") + static_website = StaticWebsite( + enabled=True, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act to set static website bsc.set_service_properties(static_website=static_website) # Assert static website was updated and cors was unchanged received_props = bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) - self._assert_cors_equal(received_props['cors'], cors) + self._assert_static_website_equal(received_props["static_website"], static_website) + self._assert_cors_equal(received_props["cors"], cors) @BlobPreparer() @recorded_by_proxy @@ -370,14 +375,16 @@ def test_set_logging(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - logging = BlobAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + logging = BlobAnalyticsLogging( + read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Act bsc.set_service_properties(analytics_logging=logging) # Assert received_props = bsc.get_service_properties() - self._assert_logging_equal(received_props['analytics_logging'], logging) + self._assert_logging_equal(received_props["analytics_logging"], logging) @BlobPreparer() @recorded_by_proxy @@ -393,7 +400,7 @@ def test_set_hour_metrics(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics) + self._assert_metrics_equal(received_props["hour_metrics"], hour_metrics) @BlobPreparer() @recorded_by_proxy @@ -402,15 +409,16 @@ def test_set_minute_metrics(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name,
"blob"), credential=storage_account_key) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=5)) + minute_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Act bsc.set_service_properties(minute_metrics=minute_metrics) # Assert received_props = bsc.get_service_properties() - self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics) + self._assert_metrics_equal(received_props["minute_metrics"], minute_metrics) @BlobPreparer() @recorded_by_proxy @@ -419,10 +427,10 @@ def test_set_cors(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - cors_rule1 = CorsRule(['www.xyz.com'], ['GET']) + cors_rule1 = CorsRule(["www.xyz.com"], ["GET"]) - allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"] - allowed_methods = ['GET', 'PUT'] + allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] + allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] @@ -431,7 +439,8 @@ def test_set_cors(self, **kwargs): allowed_methods, max_age_in_seconds=max_age_in_seconds, exposed_headers=exposed_headers, - allowed_headers=allowed_headers) + allowed_headers=allowed_headers, + ) cors = [cors_rule1, cors_rule2] @@ -440,7 +449,7 @@ def test_set_cors(self, **kwargs): # Assert received_props = bsc.get_service_properties() - self._assert_cors_equal(received_props['cors'], cors) + self._assert_cors_equal(received_props["cors"], cors) @pytest.mark.live_test_only @BlobPreparer() @@ -454,7 +463,7 @@ def test_get_service_properties_account_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(service=True), permission=AccountSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=3) + expiry=datetime.utcnow() + timedelta(hours=3), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=sas_token) @@ -472,9 +481,7 @@ def test_retention_no_days(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - pytest.raises(ValueError, - RetentionPolicy, - True, None) + pytest.raises(ValueError, RetentionPolicy, True, None) @BlobPreparer() @recorded_by_proxy @@ -485,11 +492,10 @@ def test_too_many_cors_rules(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) cors = [] for i in range(0, 6): - cors.append(CorsRule(['www.xyz.com'], ['GET'])) + cors.append(CorsRule(["www.xyz.com"], ["GET"])) # Assert - pytest.raises(HttpResponseError, - bsc.set_service_properties, None, None, None, cors) + pytest.raises(HttpResponseError, bsc.set_service_properties, None, None, None, cors) @BlobPreparer() @recorded_by_proxy @@ -498,13 +504,12 @@ def test_retention_too_long(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=366)) + minute_metrics = Metrics( + enabled=True, include_apis=True, 
retention_policy=RetentionPolicy(enabled=True, days=366) + ) # Assert - pytest.raises(HttpResponseError, - bsc.set_service_properties, - None, None, minute_metrics) + pytest.raises(HttpResponseError, bsc.set_service_properties, None, None, minute_metrics) # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_service_properties_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_service_properties_async.py index 6cf2fcace4e2..b63511b67d86 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_service_properties_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_service_properties_async.py @@ -15,7 +15,7 @@ Metrics, ResourceTypes, RetentionPolicy, - StaticWebsite + StaticWebsite, ) from azure.storage.blob.aio import BlobServiceClient @@ -32,10 +32,10 @@ class TestServicePropertiesTest(AsyncStorageRecordedTestCase): def _assert_properties_default(self, prop): assert prop is not None - self._assert_logging_equal(prop['analytics_logging'], BlobAnalyticsLogging()) - self._assert_metrics_equal(prop['hour_metrics'], Metrics()) - self._assert_metrics_equal(prop['minute_metrics'], Metrics()) - self._assert_cors_equal(prop['cors'], list()) + self._assert_logging_equal(prop["analytics_logging"], BlobAnalyticsLogging()) + self._assert_metrics_equal(prop["hour_metrics"], Metrics()) + self._assert_metrics_equal(prop["minute_metrics"], Metrics()) + self._assert_cors_equal(prop["cors"], list()) def _assert_logging_equal(self, log1, log2): if log1 is None or log2 is None: @@ -128,14 +128,14 @@ async def test_blob_service_properties(self, **kwargs): hour_metrics=Metrics(), minute_metrics=Metrics(), cors=list(), - target_version='2014-02-14' + target_version="2014-02-14", ) # Assert assert resp is None props = await bsc.get_service_properties() self._assert_properties_default(props) - assert '2014-02-14' == props['target_version'] + assert "2014-02-14" == props["target_version"] # --Test cases per feature --------------------------------------- @BlobPreparer() @@ -147,11 +147,11 @@ async def test_set_default_service_version(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) # Act - await bsc.set_service_properties(target_version='2014-02-14') + await bsc.set_service_properties(target_version="2014-02-14") # Assert received_props = await bsc.get_service_properties() - assert received_props['target_version'] == '2014-02-14' + assert received_props["target_version"] == "2014-02-14" @BlobPreparer() @recorded_by_proxy_async @@ -167,7 +167,7 @@ async def test_set_delete_retention_policy(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) @BlobPreparer() @recorded_by_proxy_async @@ -183,7 +183,7 @@ async def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) # Should work with maximum settings delete_retention_policy = RetentionPolicy(enabled=True, days=365) @@ -191,7 +191,7 @@ async def 
test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) # Should not work with 0 days delete_retention_policy = RetentionPolicy(enabled=True, days=0) @@ -201,7 +201,9 @@ async def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + received_props["delete_retention_policy"], delete_retention_policy + ) # Should not work with 366 days delete_retention_policy = RetentionPolicy(enabled=True, days=366) @@ -211,7 +213,9 @@ async def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + received_props["delete_retention_policy"], delete_retention_policy + ) @BlobPreparer() @recorded_by_proxy_async @@ -227,7 +231,7 @@ async def test_set_disabled_delete_retention_policy(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_equal(received_props["delete_retention_policy"], delete_retention_policy) @BlobPreparer() @recorded_by_proxy_async @@ -237,16 +241,15 @@ async def test_set_static_website_properties(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) static_website = StaticWebsite( - enabled=True, - index_document="index.html", - error_document404_path="errors/error/404error.html") + enabled=True, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act await bsc.set_service_properties(static_website=static_website) # Assert received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy_async @@ -256,16 +259,15 @@ async def test_set_static_website_properties_with_default_index_document_path(se bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) static_website = StaticWebsite( - enabled=True, - error_document404_path="errors/error/404error.html", - default_index_document_path="index.html") + enabled=True, error_document404_path="errors/error/404error.html", default_index_document_path="index.html" + ) # Act await bsc.set_service_properties(static_website=static_website) # Assert received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy_async @@ -283,7 +285,7 @@ async def test_set_static_web_props_missing_field(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - 
self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) # Case2: Arrange index document missing static_website = StaticWebsite(enabled=True, error_document404_path="errors/error/404error.html") @@ -293,7 +295,7 @@ async def test_set_static_web_props_missing_field(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) # Case3: Arrange error document missing static_website = StaticWebsite(enabled=True, index_document="index.html") @@ -303,7 +305,7 @@ async def test_set_static_web_props_missing_field(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) + self._assert_static_website_equal(received_props["static_website"], static_website) @BlobPreparer() @recorded_by_proxy_async @@ -312,15 +314,16 @@ async def test_disabled_static_website_properties(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - static_website = StaticWebsite(enabled=False, index_document="index.html", - error_document404_path="errors/error/404error.html") + static_website = StaticWebsite( + enabled=False, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act await bsc.set_service_properties(static_website=static_website) # Assert received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], StaticWebsite(enabled=False)) + self._assert_static_website_equal(received_props["static_website"], StaticWebsite(enabled=False)) @BlobPreparer() @recorded_by_proxy_async @@ -329,10 +332,10 @@ async def test_set_static_webprops_no_impact_other_props(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - cors_rule1 = CorsRule(['www.xyz.com'], ['GET']) + cors_rule1 = CorsRule(["www.xyz.com"], ["GET"]) - allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"] - allowed_methods = ['GET', 'PUT'] + allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] + allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] @@ -341,7 +344,8 @@ async def test_set_static_webprops_no_impact_other_props(self, **kwargs): allowed_methods, max_age_in_seconds=max_age_in_seconds, exposed_headers=exposed_headers, - allowed_headers=allowed_headers) + allowed_headers=allowed_headers, + ) cors = [cors_rule1, cors_rule2] @@ -350,19 +354,20 @@ async def test_set_static_webprops_no_impact_other_props(self, **kwargs): # Assert cors is updated received_props = await bsc.get_service_properties() - self._assert_cors_equal(received_props['cors'], cors) + self._assert_cors_equal(received_props["cors"], cors) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - static_website = StaticWebsite(enabled=True, index_document="index.html", - 
error_document404_path="errors/error/404error.html") + static_website = StaticWebsite( + enabled=True, index_document="index.html", error_document404_path="errors/error/404error.html" + ) # Act to set static website await bsc.set_service_properties(static_website=static_website) # Assert static website was updated was cors was unchanged received_props = await bsc.get_service_properties() - self._assert_static_website_equal(received_props['static_website'], static_website) - self._assert_cors_equal(received_props['cors'], cors) + self._assert_static_website_equal(received_props["static_website"], static_website) + self._assert_cors_equal(received_props["cors"], cors) @BlobPreparer() @recorded_by_proxy_async @@ -371,14 +376,16 @@ async def test_set_logging(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - logging = BlobAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + logging = BlobAnalyticsLogging( + read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Act await bsc.set_service_properties(analytics_logging=logging) # Assert received_props = await bsc.get_service_properties() - self._assert_logging_equal(received_props['analytics_logging'], logging) + self._assert_logging_equal(received_props["analytics_logging"], logging) @BlobPreparer() @recorded_by_proxy_async @@ -394,7 +401,7 @@ async def test_set_hour_metrics(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics) + self._assert_metrics_equal(received_props["hour_metrics"], hour_metrics) @BlobPreparer() @recorded_by_proxy_async @@ -403,15 +410,16 @@ async def test_set_minute_metrics(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=5)) + minute_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Act await bsc.set_service_properties(minute_metrics=minute_metrics) # Assert received_props = await bsc.get_service_properties() - self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics) + self._assert_metrics_equal(received_props["minute_metrics"], minute_metrics) @BlobPreparer() @recorded_by_proxy_async @@ -420,10 +428,10 @@ async def test_set_cors(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - cors_rule1 = CorsRule(['www.xyz.com'], ['GET']) + cors_rule1 = CorsRule(["www.xyz.com"], ["GET"]) - allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"] - allowed_methods = ['GET', 'PUT'] + allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] + allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] @@ -432,7 +440,8 @@ async def test_set_cors(self, **kwargs): allowed_methods, max_age_in_seconds=max_age_in_seconds, exposed_headers=exposed_headers, - 
allowed_headers=allowed_headers) + allowed_headers=allowed_headers, + ) cors = [cors_rule1, cors_rule2] @@ -441,7 +450,7 @@ async def test_set_cors(self, **kwargs): # Assert received_props = await bsc.get_service_properties() - self._assert_cors_equal(received_props['cors'], cors) + self._assert_cors_equal(received_props["cors"], cors) @pytest.mark.live_test_only @BlobPreparer() @@ -455,7 +464,7 @@ async def test_get_service_properties_account_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(service=True), permission=AccountSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=3) + expiry=datetime.utcnow() + timedelta(hours=3), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=sas_token) @@ -472,9 +481,7 @@ async def test_retention_no_days(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Assert - pytest.raises(ValueError, - RetentionPolicy, - True, None) + pytest.raises(ValueError, RetentionPolicy, True, None) @BlobPreparer() @recorded_by_proxy_async @@ -485,7 +492,7 @@ async def test_too_many_cors_rules(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) cors = [] for i in range(0, 6): - cors.append(CorsRule(['www.xyz.com'], ['GET'])) + cors.append(CorsRule(["www.xyz.com"], ["GET"])) # Assert with pytest.raises(HttpResponseError): @@ -498,11 +505,13 @@ async def test_retention_too_long(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) - minute_metrics = Metrics(enabled=True, include_apis=True, - retention_policy=RetentionPolicy(enabled=True, days=366)) + minute_metrics = Metrics( + enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=366) + ) # Assert with pytest.raises(HttpResponseError): await bsc.set_service_properties(None, None, minute_metrics) + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_service_stats.py b/sdk/storage/azure-storage-blob/tests/test_blob_service_stats.py index f22c81764957..8a8d31d2a742 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_service_stats.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_service_stats.py @@ -17,17 +17,18 @@ class TestServiceStats(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _assert_stats_default(self, stats): assert stats is not None - assert stats['geo_replication'] is not None + assert stats["geo_replication"] is not None - assert stats['geo_replication']['status'] == 'live' - assert stats['geo_replication']['last_sync_time'] is not None + assert stats["geo_replication"]["status"] == "live" + assert stats["geo_replication"]["last_sync_time"] is not None def _assert_stats_unavailable(self, stats): assert stats is not None - assert stats['geo_replication'] is not None + assert stats["geo_replication"] is not None + + assert stats["geo_replication"]["status"] == "unavailable" + assert stats["geo_replication"]["last_sync_time"] is None - assert stats['geo_replication']['status'] == 'unavailable' - assert stats['geo_replication']['last_sync_time'] is None # -------------------------------------------------------------------------- @pytest.mark.playback_test_only @@ -68,4 +69,5 @@ def test_blob_service_stats_when_unavailable(self, 
**kwargs): # Assert self._assert_stats_unavailable(stats) + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_service_stats_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_service_stats_async.py index 9411ab0eab88..25e9a03380cc 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_service_stats_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_service_stats_async.py @@ -17,17 +17,18 @@ class TestServiceStatsAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- def _assert_stats_default(self, stats): assert stats is not None - assert stats['geo_replication'] is not None + assert stats["geo_replication"] is not None - assert stats['geo_replication']['status'] == 'live' - assert stats['geo_replication']['last_sync_time'] is not None + assert stats["geo_replication"]["status"] == "live" + assert stats["geo_replication"]["last_sync_time"] is not None def _assert_stats_unavailable(self, stats): assert stats is not None - assert stats['geo_replication'] is not None + assert stats["geo_replication"] is not None + + assert stats["geo_replication"]["status"] == "unavailable" + assert stats["geo_replication"]["last_sync_time"] is None - assert stats['geo_replication']['status'] == 'unavailable' - assert stats['geo_replication']['last_sync_time'] is None # -------------------------------------------------------------------------- @pytest.mark.playback_test_only @@ -65,4 +66,5 @@ async def test_blob_service_stats_when_unavailable(self, **kwargs): # Assert self._assert_stats_unavailable(stats) + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py index 123fd2b50e46..c3fcd8acbab7 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py @@ -11,14 +11,14 @@ from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" # ------------------------------------------------------------------------------ class TestBlobStorageAccount(StorageRecordedTestCase): def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: bsc.create_container(self.container_name) @@ -32,13 +32,14 @@ def _get_blob_reference(self, bsc): def _create_blob(self, bsc): blob = self._get_blob_reference(bsc) - blob.upload_blob(b'') + blob.upload_blob(b"") return blob def assertBlobEqual(self, container_name, blob_name, expected_data, bsc): blob = bsc.get_blob_client(container_name, blob_name) actual_data = blob.download_blob().readall() assert actual_data == expected_data + # -------------------------------------------------------------------------- @BlobPreparer() @@ -55,7 +56,7 @@ def test_standard_blob_tier_set_tier_api(self, **kwargs): for tier in tiers: blob_name = self.get_resource_name(tier.value) blob = bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(b'hello world') + blob.upload_blob(b"hello world") blob_ref = blob.get_blob_properties() assert blob_ref.blob_tier is not None @@ -88,13 +89,12 @@ def 
test_set_standard_blob_tier_with_rehydrate_priority(self, **kwargs): rehydrate_priority = RehydratePriority.standard # Act - blob_client.set_standard_blob_tier(blob_tier, - rehydrate_priority=rehydrate_priority) + blob_client.set_standard_blob_tier(blob_tier, rehydrate_priority=rehydrate_priority) blob_client.set_standard_blob_tier(rehydrate_tier) blob_props = blob_client.get_blob_properties() # Assert - assert 'rehydrate-pending-to-cool' == blob_props.archive_status + assert "rehydrate-pending-to-cool" == blob_props.archive_status @BlobPreparer() @recorded_by_proxy @@ -104,11 +104,11 @@ def test_rehydration_status(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - blob_name = 'rehydration_test_blob_1' - blob_name2 = 'rehydration_test_blob_2' + blob_name = "rehydration_test_blob_1" + blob_name2 = "rehydration_test_blob_2" container = bsc.get_container_client(self.container_name) - data = b'hello world' + data = b"hello world" blob = container.upload_blob(blob_name, data) blob.set_standard_blob_tier(StandardBlobTier.Archive) blob.set_standard_blob_tier(StandardBlobTier.Cool) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py index 82b1ee006244..eeaa0fb809e2 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py @@ -13,14 +13,14 @@ from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" # ------------------------------------------------------------------------------ class TestBlobStorageAccountAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- async def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: await bsc.create_container(self.container_name) @@ -33,13 +33,14 @@ def _get_blob_reference(self, bsc): async def _create_blob(self, bsc): blob = self._get_blob_reference(bsc) - await blob.upload_blob(b'') + await blob.upload_blob(b"") return blob async def assertBlobEqual(self, container_name, blob_name, expected_data, bsc): blob = bsc.get_blob_client(container_name, blob_name) actual_data = await blob.download_blob().readall() assert actual_data == expected_data + # -------------------------------------------------------------------------- @BlobPreparer() @@ -56,7 +57,7 @@ async def test_standard_blob_tier_set_tier_api(self, **kwargs): for tier in tiers: blob_name = self.get_resource_name(tier.value) blob = bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(b'hello world') + await blob.upload_blob(b"hello world") blob_ref = await blob.get_blob_properties() assert blob_ref.blob_tier is not None @@ -89,13 +90,12 @@ async def test_set_std_blob_tier_w_rehydrate_priority(self, **kwargs): rehydrate_priority = RehydratePriority.standard # Act - await blob_client.set_standard_blob_tier(blob_tier, - rehydrate_priority=rehydrate_priority) + await blob_client.set_standard_blob_tier(blob_tier, rehydrate_priority=rehydrate_priority) await blob_client.set_standard_blob_tier(rehydrate_tier) blob_props = await blob_client.get_blob_properties() # Assert - assert 'rehydrate-pending-to-cool' 
== blob_props.archive_status + assert "rehydrate-pending-to-cool" == blob_props.archive_status @BlobPreparer() @recorded_by_proxy_async @@ -105,11 +105,11 @@ async def test_rehydration_status(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) await self._setup(bsc) - blob_name = 'rehydration_test_blob_1' - blob_name2 = 'rehydration_test_blob_2' + blob_name = "rehydration_test_blob_1" + blob_name2 = "rehydration_test_blob_2" container = bsc.get_container_client(self.container_name) - data = b'hello world' + data = b"hello world" blob = await container.upload_blob(blob_name, data) await blob.set_standard_blob_tier(StandardBlobTier.Archive) await blob.set_standard_blob_tier(StandardBlobTier.Cool) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_tags.py b/sdk/storage/azure-storage-blob/tests/test_blob_tags.py index 8008e52cf0e9..3e372942bc48 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_tags.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_tags.py @@ -18,17 +18,18 @@ BlobServiceClient, generate_account_sas, generate_blob_sas, - ResourceTypes + ResourceTypes, ) from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase from settings.testcase import BlobPreparer -#------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'container' -TEST_BLOB_PREFIX = 'blob' -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +TEST_CONTAINER_PREFIX = "container" +TEST_BLOB_PREFIX = "blob" +# ------------------------------------------------------------------------------ + class TestStorageBlobTags(StorageRecordedTestCase): @@ -43,7 +44,6 @@ def _setup(self, storage_account_name, key): pass self.byte_data = self.get_random_bytes(1024) - def _teardown(self, FILE_PATH): if os.path.isfile(FILE_PATH): try: @@ -51,7 +51,7 @@ def _teardown(self, FILE_PATH): except: pass - #--Helpers----------------------------------------------------------------- + # --Helpers----------------------------------------------------------------- def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX) @@ -64,7 +64,7 @@ def _create_block_blob(self, tags=None, container_name=None, blob_name=None): def _create_empty_block_blob(self, tags=None): blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - resp = blob_client.upload_blob(b'', length=0, overwrite=True, tags=tags) + resp = blob_client.upload_blob(b"", length=0, overwrite=True, tags=tags) return blob_client, resp def _create_append_blob(self, tags=None): @@ -87,7 +87,7 @@ def _create_container(self, prefix="container"): pass return container_name - #-- test cases for blob tags ---------------------------------------------- + # -- test cases for blob tags ---------------------------------------------- @BlobPreparer() @recorded_by_proxy @@ -113,7 +113,7 @@ def test_set_blob_tags_with_lease(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_client, _ = self._create_block_blob() - lease = blob_client.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob_client.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act blob_tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} @@ -144,7 +144,7 @@ def 
test_set_blob_tags_for_a_version(self, **kwargs): # Act tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - resp = blob_client.set_blob_tags(tags, version_id=resp['version_id']) + resp = blob_client.set_blob_tags(tags, version_id=resp["version_id"]) # Assert assert resp is not None @@ -265,16 +265,18 @@ def test_commit_block_list_with_tags(self, **kwargs): self._setup(storage_account_name, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - blob_client, resp = self._create_empty_block_blob(tags={'condition tag': 'test tag'}) + blob_client, resp = self._create_empty_block_blob(tags={"condition tag": "test tag"}) - blob_client.stage_block('1', b'AAA') - blob_client.stage_block('2', b'BBB') - blob_client.stage_block('3', b'CCC') + blob_client.stage_block("1", b"AAA") + blob_client.stage_block("2", b"BBB") + blob_client.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] with pytest.raises(ResourceModifiedError): - blob_client.commit_block_list(block_list, tags=tags, if_tags_match_condition="\"condition tag\"='wrong tag'") + blob_client.commit_block_list( + block_list, tags=tags, if_tags_match_condition="\"condition tag\"='wrong tag'" + ) blob_client.commit_block_list(block_list, tags=tags, if_tags_match_condition="\"condition tag\"='test tag'") resp = blob_client.get_blob_tags() @@ -294,17 +296,18 @@ def test_start_copy_from_url_with_tags(self, **kwargs): blob_client, resp = self._create_block_blob() # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_client.blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_client.blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = copyblob.start_copy_from_url(sourceblob, tags=tags) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = copyblob.download_blob().readall() assert copy_content == self.byte_data @@ -324,7 +327,7 @@ def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): self._setup(storage_account_name, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} source_blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - source_blob.upload_blob(b'Hello World', overwrite=True, tags=tags) + source_blob.upload_blob(b"Hello World", overwrite=True, tags=tags) source_sas = self.generate_sas( generate_blob_sas, @@ -335,8 +338,8 @@ def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): permission=BlobSasPermissions(read=True, tag=True), expiry=datetime.utcnow() + timedelta(hours=1), ) - source_url = source_blob.url + '?' + source_sas - dest_blob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + source_url = source_blob.url + "?" 
+ source_sas + dest_blob = self.bsc.get_blob_client(self.container_name, "blob1copy") # Act with pytest.raises(ValueError): @@ -346,9 +349,9 @@ def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_tags = dest_blob.get_blob_tags() @@ -366,7 +369,7 @@ def test_start_copy_from_url_with_tags_replace_tags(self, **kwargs): tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} tags2 = {"hello": "world"} source_blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - source_blob.upload_blob(b'Hello World', overwrite=True, tags=tags) + source_blob.upload_blob(b"Hello World", overwrite=True, tags=tags) source_sas = self.generate_sas( generate_blob_sas, @@ -377,17 +380,17 @@ def test_start_copy_from_url_with_tags_replace_tags(self, **kwargs): permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) - source_url = source_blob.url + '?' + source_sas - dest_blob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + source_url = source_blob.url + "?" + source_sas + dest_blob = self.bsc.get_blob_client(self.container_name, "blob1copy") # Act copy = dest_blob.start_copy_from_url(source_url, tags=tags2, requires_sync=True) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_tags = dest_blob.get_blob_tags() @@ -407,7 +410,7 @@ def test_list_blobs_returns_tags(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob_list = container.list_blobs(include="tags") - #Assert + # Assert for blob in blob_list: assert blob.tag_count == len(tags) for key, value in blob.tags.items(): @@ -441,9 +444,9 @@ def test_filter_blobs(self, **kwargs): assert 2 == len(items_on_page1) assert 2 == len(items_on_page2) - assert len(items_on_page2[0]['tags']) == 2 - assert items_on_page2[0]['tags']['tag1'] == 'firsttag' - assert items_on_page2[0]['tags']['tag2'] == 'secondtag' + assert len(items_on_page2[0]["tags"]) == 2 + assert items_on_page2[0]["tags"]["tag1"] == "firsttag" + assert items_on_page2[0]["tags"]["tag2"] == "secondtag" @pytest.mark.live_test_only @BlobPreparer() @@ -455,13 +458,14 @@ def test_filter_blobs_using_account_sas(self, **kwargs): storage_account_name, storage_account_key, ResourceTypes(service=True, container=True, object=True), - AccountSasPermissions(write=True, list=True, read=True, delete_previous_version=True, tag=True, - filter_by_tags=True), + AccountSasPermissions( + write=True, list=True, read=True, delete_previous_version=True, tag=True, filter_by_tags=True + ), datetime.utcnow() + timedelta(hours=1), ) self._setup(storage_account_name, token) - tags = {"year": '1000', "tag2": "secondtag", "tag3": "thirdtag", "habitat_type": 'Shallow Lowland Billabongs'} + tags = {"year": "1000", "tag2": "secondtag", "tag3": "thirdtag", "habitat_type": "Shallow Lowland Billabongs"} blob_client, _ = self._create_block_blob(tags=tags, container_name=self.container_name) blob_client.set_blob_tags(tags=tags) tags_on_blob = blob_client.get_blob_tags() @@ -489,13 
+493,14 @@ def test_set_blob_tags_using_blob_sas(self, **kwargs): storage_account_name, storage_account_key, ResourceTypes(service=True, container=True, object=True), - AccountSasPermissions(write=True, list=True, read=True, delete_previous_version=True, tag=True, - filter_by_tags=True), + AccountSasPermissions( + write=True, list=True, read=True, delete_previous_version=True, tag=True, filter_by_tags=True + ), datetime.utcnow() + timedelta(hours=1), ) self._setup(storage_account_name, token) - tags = {"year": '2000', "tag2": "tagtwo", "tag3": "tagthree", "habitat_type": 'Shallow Lowland Billabongs'} + tags = {"year": "2000", "tag2": "tagtwo", "tag3": "tagthree", "habitat_type": "Shallow Lowland Billabongs"} blob_client, _ = self._create_block_blob(tags=tags, container_name=self.container_name) token1 = generate_blob_sas( storage_account_name, @@ -522,4 +527,6 @@ def test_set_blob_tags_using_blob_sas(self, **kwargs): first_page = next(blob_list) items_on_page1 = list(first_page) assert 1 == len(items_on_page1) -#------------------------------------------------------------------------------ + + +# ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_tags_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_tags_async.py index 94e712966216..858603e43e34 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_tags_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_tags_async.py @@ -16,10 +16,11 @@ from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase from settings.testcase import BlobPreparer -#------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'container' -TEST_BLOB_PREFIX = 'blob' -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +TEST_CONTAINER_PREFIX = "container" +TEST_BLOB_PREFIX = "blob" +# ------------------------------------------------------------------------------ + class TestStorageBlobTags(AsyncStorageRecordedTestCase): @@ -34,7 +35,7 @@ async def _setup(self, storage_account_name, key): pass self.byte_data = self.get_random_bytes(1024) - #--Helpers----------------------------------------------------------------- + # --Helpers----------------------------------------------------------------- def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX) @@ -47,7 +48,7 @@ async def _create_block_blob(self, tags=None, container_name=None, blob_name=Non async def _create_empty_block_blob(self, tags=None): blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - resp = await blob_client.upload_blob(b'', length=0, overwrite=True, tags=tags) + resp = await blob_client.upload_blob(b"", length=0, overwrite=True, tags=tags) return blob_client, resp async def _create_append_blob(self, tags=None): @@ -70,7 +71,7 @@ async def _create_container(self, prefix="container"): pass return container_name - #-- test cases for blob tags ---------------------------------------------- + # -- test cases for blob tags ---------------------------------------------- @BlobPreparer() @recorded_by_proxy_async @@ -96,7 +97,7 @@ async def test_set_blob_tags_with_lease(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_client, _ = await self._create_block_blob() - lease = await 
blob_client.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob_client.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act blob_tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} @@ -128,7 +129,7 @@ async def test_set_blob_tags_for_a_version(self, **kwargs): # Act tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - resp = await blob_client.set_blob_tags(tags, version_id=resp['version_id']) + resp = await blob_client.set_blob_tags(tags, version_id=resp["version_id"]) # Assert assert resp is not None @@ -249,17 +250,21 @@ async def test_commit_block_list_with_tags(self, **kwargs): await self._setup(storage_account_name, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - blob_client, resp = await self._create_empty_block_blob(tags={'condition tag': 'test tag'}) + blob_client, resp = await self._create_empty_block_blob(tags={"condition tag": "test tag"}) - await blob_client.stage_block('1', b'AAA') - await blob_client.stage_block('2', b'BBB') - await blob_client.stage_block('3', b'CCC') + await blob_client.stage_block("1", b"AAA") + await blob_client.stage_block("2", b"BBB") + await blob_client.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] with pytest.raises(ResourceModifiedError): - await blob_client.commit_block_list(block_list, tags=tags, if_tags_match_condition="\"condition tag\"='wrong tag'") - await blob_client.commit_block_list(block_list, tags=tags, if_tags_match_condition="\"condition tag\"='test tag'") + await blob_client.commit_block_list( + block_list, tags=tags, if_tags_match_condition="\"condition tag\"='wrong tag'" + ) + await blob_client.commit_block_list( + block_list, tags=tags, if_tags_match_condition="\"condition tag\"='test tag'" + ) resp = await blob_client.get_blob_tags() @@ -278,17 +283,18 @@ async def test_start_copy_from_url_with_tags(self, **kwargs): blob_client, resp = await self._create_block_blob() # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_client.blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_client.blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = await copyblob.start_copy_from_url(sourceblob, tags=tags) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = await (await copyblob.download_blob()).readall() assert copy_content == self.byte_data @@ -308,7 +314,7 @@ async def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): await self._setup(storage_account_name, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} source_blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await source_blob.upload_blob(b'Hello World', overwrite=True, tags=tags) + await source_blob.upload_blob(b"Hello World", overwrite=True, tags=tags) source_sas = self.generate_sas( generate_blob_sas, @@ -319,8 +325,8 
@@ async def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): permission=BlobSasPermissions(read=True, tag=True), expiry=datetime.utcnow() + timedelta(hours=1), ) - source_url = source_blob.url + '?' + source_sas - dest_blob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + source_url = source_blob.url + "?" + source_sas + dest_blob = self.bsc.get_blob_client(self.container_name, "blob1copy") # Act with pytest.raises(ValueError): @@ -330,9 +336,9 @@ async def test_start_copy_from_url_with_tags_copy_tags(self, **kwargs): # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_tags = await dest_blob.get_blob_tags() @@ -350,7 +356,7 @@ async def test_start_copy_from_url_with_tags_replace_tags(self, **kwargs): tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} tags2 = {"hello": "world"} source_blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await source_blob.upload_blob(b'Hello World', overwrite=True, tags=tags) + await source_blob.upload_blob(b"Hello World", overwrite=True, tags=tags) source_sas = self.generate_sas( generate_blob_sas, @@ -361,17 +367,17 @@ async def test_start_copy_from_url_with_tags_replace_tags(self, **kwargs): permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) - source_url = source_blob.url + '?' + source_sas - dest_blob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + source_url = source_blob.url + "?" + source_sas + dest_blob = self.bsc.get_blob_client(self.container_name, "blob1copy") # Act copy = await dest_blob.start_copy_from_url(source_url, tags=tags2, requires_sync=True) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_tags = await dest_blob.get_blob_tags() @@ -391,7 +397,7 @@ async def test_list_blobs_returns_tags(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob_list = container.list_blobs(include="tags") - #Assert + # Assert async for blob in blob_list: assert blob.tag_count == len(tags) for key, value in blob.tags.items(): @@ -430,7 +436,9 @@ async def test_filter_blobs(self, **kwargs): assert 2 == len(items_on_page1) assert 2 == len(items_on_page2) - assert len(items_on_page2[0]['tags']) == 2 - assert items_on_page2[0]['tags']['tag1'] == 'firsttag' - assert items_on_page2[0]['tags']['tag2'] == 'secondtag' -#------------------------------------------------------------------------------ + assert len(items_on_page2[0]["tags"]) == 2 + assert items_on_page2[0]["tags"]["tag1"] == "firsttag" + assert items_on_page2[0]["tags"]["tag2"] == "secondtag" + + +# ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_block_blob.py index 2ccb969c557b..a16e4e21e3ec 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob.py @@ -31,26 +31,27 @@ from settings.testcase import BlobPreparer from test_helpers import 
NonSeekableStream, ProgressTracker -#------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +# ------------------------------------------------------------------------------ +TEST_BLOB_PREFIX = "blob" LARGE_BLOB_SIZE = 5 * 1024 + 5 TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ class TestStorageBlockBlob(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- - def _setup(self, storage_account_name, key, container_name='utcontainer'): + def _setup(self, storage_account_name, key, container_name="utcontainer"): # test chunking functionality by reducing the size of each chunk, # otherwise the tests would take too long to execute self.bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=key, max_single_put_size=1024, - max_block_size=1024) + max_block_size=1024, + ) self.config = self.bsc._config self.container_name = self.get_resource_name(container_name) - self.source_container_name = self.get_resource_name('utcontainersource1') + self.source_container_name = self.get_resource_name("utcontainersource1") if self.is_live: try: @@ -65,14 +66,16 @@ def _setup(self, storage_account_name, key, container_name='utcontainer'): def _get_blob_reference(self, prefix=TEST_BLOB_PREFIX): return self.get_resource_name(prefix) - def _create_blob(self, tags=None, data=b'', **kwargs): + def _create_blob(self, tags=None, data=b"", **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, tags=tags, overwrite=True, **kwargs) return blob def _create_source_blob(self, data): - blob_client = self.bsc.get_blob_client(self.source_container_name, self.get_resource_name(TEST_BLOB_PREFIX+"1")) + blob_client = self.bsc.get_blob_client( + self.source_container_name, self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) blob_client.upload_blob(data, overwrite=True) return blob_client @@ -81,7 +84,7 @@ def assertBlobEqual(self, container_name, blob_name, expected_data): actual_data = blob.download_blob() assert actual_data.readall() == expected_data - #--Test cases for block blobs -------------------------------------------- + # --Test cases for block blobs -------------------------------------------- @BlobPreparer() @recorded_by_proxy def test_upload_blob_from_url_with_oauth(self, **kwargs): @@ -93,7 +96,9 @@ def test_upload_blob_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(data=source_blob_data) destination_blob_client = self._create_blob() - token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token) + token = "Bearer {}".format( + self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token + ) # Assert this operation fails without a credential with pytest.raises(HttpResponseError): @@ -119,21 +124,22 @@ def test_upload_blob_with_and_without_overwrite(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob = "{0}/{1}/{2}?{3}".format( + 
self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas ) - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - new_blob_client.upload_blob(b'destination blob data') + new_blob_client.upload_blob(b"destination blob data") # Assert with pytest.raises(ResourceExistsError): new_blob_client.upload_blob_from_url(source_blob, overwrite=False) new_blob = new_blob_client.upload_blob_from_url(source_blob, overwrite=True) assert new_blob is not None new_blob_content = new_blob_client.download_blob().readall() - assert new_blob_content == b'source blob data' + assert new_blob_content == b"source blob data" @BlobPreparer() @recorded_by_proxy @@ -151,10 +157,11 @@ def test_upload_blob_from_url_with_existing_blob(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas ) - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) @@ -162,7 +169,7 @@ def test_upload_blob_from_url_with_existing_blob(self, **kwargs): # Assert assert new_blob is not None new_blob_content = new_blob_client.download_blob().readall() - assert new_blob_content == b'test data' + assert new_blob_content == b"test data" @BlobPreparer() @recorded_by_proxy @@ -181,11 +188,12 @@ def test_upload_blob_from_url_with_standard_tier_specified(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # Act - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas + ) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -214,11 +222,12 @@ def test_upload_blob_from_url_with_cold_tier_specified(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # Act - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas + ) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -245,21 +254,22 @@ def test_upload_blob_with_destination_lease(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + 
source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) new_blob_client.upload_blob(data="test") - new_blob_lease = new_blob_client.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + new_blob_lease = new_blob_client.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") with pytest.raises(HttpResponseError): new_blob_client.upload_blob_from_url( - source_blob_url, destination_lease="baddde9e-8247-4276-8bfa-c7a8081eba1d", overwrite=True) + source_blob_url, destination_lease="baddde9e-8247-4276-8bfa-c7a8081eba1d", overwrite=True + ) with pytest.raises(HttpResponseError): new_blob_client.upload_blob_from_url(source_blob_url) - new_blob_client.upload_blob_from_url( - source_blob_url, destination_lease=new_blob_lease) + new_blob_client.upload_blob_from_url(source_blob_url, destination_lease=new_blob_lease) @BlobPreparer() @recorded_by_proxy @@ -272,9 +282,11 @@ def test_upload_blob_from_url_if_match_condition(self, **kwargs): self._setup(storage_account_name, storage_account_key) source_blob = self._create_blob() early_test_datetime = self.get_datetime_variable( - variables, "early_test_dt", (datetime.utcnow() - timedelta(minutes=15))) + variables, "early_test_dt", (datetime.utcnow() - timedelta(minutes=15)) + ) late_test_datetime = self.get_datetime_variable( - variables, "late_test_dt", (datetime.utcnow() + timedelta(minutes=15))) + variables, "late_test_dt", (datetime.utcnow() + timedelta(minutes=15)) + ) sas = self.generate_sas( generate_blob_sas, account_name=storage_account_name, @@ -282,35 +294,38 @@ def test_upload_blob_from_url_if_match_condition(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) new_blob_client.upload_blob(data="fake data") # Assert with pytest.raises(ResourceModifiedError): - new_blob_client.upload_blob_from_url( - source_blob_url, if_modified_since=late_test_datetime, overwrite=True) - new_blob_client.upload_blob_from_url( - source_blob_url, if_modified_since=early_test_datetime, overwrite=True) + new_blob_client.upload_blob_from_url(source_blob_url, if_modified_since=late_test_datetime, overwrite=True) + new_blob_client.upload_blob_from_url(source_blob_url, if_modified_since=early_test_datetime, overwrite=True) with pytest.raises(ResourceModifiedError): new_blob_client.upload_blob_from_url( - source_blob_url, if_unmodified_since=early_test_datetime, overwrite=True) - new_blob_client.upload_blob_from_url( - source_blob_url, if_unmodified_since=late_test_datetime, overwrite=True) + source_blob_url, if_unmodified_since=early_test_datetime, overwrite=True + ) + new_blob_client.upload_blob_from_url(source_blob_url, 
if_unmodified_since=late_test_datetime, overwrite=True) with pytest.raises(ResourceNotFoundError): new_blob_client.upload_blob_from_url( - source_blob_url, source_if_modified_since=late_test_datetime, overwrite=True) + source_blob_url, source_if_modified_since=late_test_datetime, overwrite=True + ) new_blob_client.upload_blob_from_url( - source_blob_url, source_if_modified_since=early_test_datetime, overwrite=True) + source_blob_url, source_if_modified_since=early_test_datetime, overwrite=True + ) with pytest.raises(ResourceNotFoundError): new_blob_client.upload_blob_from_url( - source_blob_url, source_if_unmodified_since=early_test_datetime, overwrite=True) + source_blob_url, source_if_unmodified_since=early_test_datetime, overwrite=True + ) new_blob_client.upload_blob_from_url( - source_blob_url, source_if_unmodified_since=late_test_datetime, overwrite=True) + source_blob_url, source_if_unmodified_since=late_test_datetime, overwrite=True + ) return variables @@ -330,14 +345,14 @@ def test_upload_blob_from_url_with_cpk(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) - new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True, cpk=TEST_ENCRYPTION_KEY) + new_blob.upload_blob_from_url(source_blob_url, include_source_blob_properties=True, cpk=TEST_ENCRYPTION_KEY) # Assert with pytest.raises(HttpResponseError): @@ -353,15 +368,16 @@ def test_upload_blob_from_url_overwrite_properties(self, **kwargs): # Act self._setup(storage_account_name, storage_account_key) - source_blob_content_settings = ContentSettings(content_language='spanish') - new_blob_content_settings = ContentSettings(content_language='english') + source_blob_content_settings = ContentSettings(content_language="spanish") + new_blob_content_settings = ContentSettings(content_language="english") source_blob_tags = {"tag1": "sourcetag", "tag2": "secondsourcetag"} new_blob_tags = {"tag1": "copytag"} source_blob = self._create_blob( data=b"This is test data to be copied over.", tags=source_blob_tags, - content_settings=source_blob_content_settings) + content_settings=source_blob_content_settings, + ) sas = self.generate_sas( generate_blob_sas, account_name=storage_account_name, @@ -369,19 +385,22 @@ def test_upload_blob_from_url_overwrite_properties(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) - new_blob.upload_blob_from_url(source_blob_url, - 
include_source_blob_properties=True, - tags=new_blob_tags, - content_settings=new_blob_content_settings, - overwrite=True, - cpk=TEST_ENCRYPTION_KEY) + new_blob.upload_blob_from_url( + source_blob_url, + include_source_blob_properties=True, + tags=new_blob_tags, + content_settings=new_blob_content_settings, + overwrite=True, + cpk=TEST_ENCRYPTION_KEY, + ) new_blob_props = new_blob.get_blob_properties(cpk=TEST_ENCRYPTION_KEY) # Assert that source blob properties did not take precedence. @@ -407,19 +426,22 @@ def test_upload_blob_from_url_with_source_content_md5(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True, source_content_md5=source_md5) + source_blob_url, include_source_blob_properties=True, source_content_md5=source_md5 + ) with pytest.raises(HttpResponseError): new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=False, source_content_md5=bad_source_md5) + source_blob_url, include_source_blob_properties=False, source_content_md5=bad_source_md5 + ) new_blob_content_md5 = new_blob.get_blob_properties().content_settings.content_md5 assert new_blob_content_md5 == source_md5 @@ -432,16 +454,15 @@ def test_upload_blob_from_url_source_and_destination_properties(self, **kwargs): # Act self._setup(storage_account_name, storage_account_key) content_settings = ContentSettings( - content_type='application/octet-stream', - content_language='spanish', - content_disposition='inline' + content_type="application/octet-stream", content_language="spanish", content_disposition="inline" ) source_blob = self._create_blob( - data=b"This is test data to be copied over.", - tags={"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}, - content_settings=content_settings, - standard_blob_tier=StandardBlobTier.Cool) - source_blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + data=b"This is test data to be copied over.", + tags={"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}, + content_settings=content_settings, + standard_blob_tier=StandardBlobTier.Cool, + ) + source_blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") source_blob_props = source_blob.get_blob_properties() sas = self.generate_sas( generate_blob_sas, @@ -450,34 +471,37 @@ def test_upload_blob_from_url_source_and_destination_properties(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") 
new_blob_copy1 = self.bsc.get_blob_client(self.container_name, blob_name) - new_blob_copy2 = self.bsc.get_blob_client(self.container_name, 'blob2copy') - new_blob_copy1.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True) - new_blob_copy2.upload_blob_from_url( - source_blob_url, include_source_blob_properties=False) + new_blob_copy2 = self.bsc.get_blob_client(self.container_name, "blob2copy") + new_blob_copy1.upload_blob_from_url(source_blob_url, include_source_blob_properties=True) + new_blob_copy2.upload_blob_from_url(source_blob_url, include_source_blob_properties=False) new_blob_copy1_props = new_blob_copy1.get_blob_properties() new_blob_copy2_props = new_blob_copy2.get_blob_properties() # Assert - assert new_blob_copy1_props.content_settings.content_language == \ - source_blob_props.content_settings.content_language - assert new_blob_copy2_props.content_settings.content_language != \ - source_blob_props.content_settings.content_language + assert ( + new_blob_copy1_props.content_settings.content_language + == source_blob_props.content_settings.content_language + ) + assert ( + new_blob_copy2_props.content_settings.content_language + != source_blob_props.content_settings.content_language + ) - assert source_blob_props.lease.status == 'locked' - assert new_blob_copy1_props.lease.status == 'unlocked' - assert new_blob_copy2_props.lease.status == 'unlocked' + assert source_blob_props.lease.status == "locked" + assert new_blob_copy1_props.lease.status == "unlocked" + assert new_blob_copy2_props.lease.status == "unlocked" - assert source_blob_props.blob_tier == 'Cool' - assert new_blob_copy1_props.blob_tier == 'Hot' - assert new_blob_copy2_props.blob_tier == 'Hot' + assert source_blob_props.blob_tier == "Cool" + assert new_blob_copy1_props.blob_tier == "Hot" + assert new_blob_copy2_props.blob_tier == "Hot" assert source_blob_props.tag_count == 3 assert new_blob_copy1_props.tag_count is None @@ -494,8 +518,8 @@ def test_put_block(self, **kwargs): # Act for i in range(5): - headers = blob.stage_block(i, 'block {0}'.format(i).encode('utf-8')) - assert 'content_crc64' in headers + headers = blob.stage_block(i, "block {0}".format(i).encode("utf-8")) + assert "content_crc64" in headers # Assert @@ -512,12 +536,12 @@ def return_response(resp, _, headers): return (resp, headers) # Act - resp, headers = blob.stage_block(0, 'block 0', cls=return_response) + resp, headers = blob.stage_block(0, "block 0", cls=return_response) # Assert # This has changed to resp.http_response.status_code since now we return the pipeline response assert 201 == resp.http_response.status_code - assert 'x-ms-content-crc64' in headers + assert "x-ms-content-crc64" in headers @BlobPreparer() @recorded_by_proxy @@ -529,8 +553,8 @@ def test_put_block_unicode(self, **kwargs): blob = self._create_blob() # Act - headers = blob.stage_block('1', u'啊齄丂狛狜') - assert 'content_crc64' in headers + headers = blob.stage_block("1", "啊齄丂狛狜") + assert "content_crc64" in headers # Assert @@ -544,7 +568,7 @@ def test_put_block_with_md5(self, **kwargs): blob = self._create_blob() # Act - blob.stage_block(1, b'block', validate_content=True) + blob.stage_block(1, b"block", validate_content=True) # Assert @@ -557,19 +581,19 @@ def test_put_block_list(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + 
blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = blob.commit_block_list(block_list) # Assert content = blob.download_blob() - assert content.readall() == b'AAABBBCCC' - assert content.properties.etag == put_block_list_resp.get('etag') - assert content.properties.last_modified == put_block_list_resp.get('last_modified') + assert content.readall() == b"AAABBBCCC" + assert content.properties.etag == put_block_list_resp.get("etag") + assert content.properties.last_modified == put_block_list_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -580,47 +604,54 @@ def test_put_block_with_immutability_policy(self, **kwargs): variables = kwargs.pop("variables", {}) self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - put_block_list_resp = blob.commit_block_list(block_list, - immutability_policy=immutability_policy, - legal_hold=True, - ) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + put_block_list_resp = blob.commit_block_list( + block_list, + immutability_policy=immutability_policy, + legal_hold=True, + ) # Assert download_resp = blob.download_blob() - assert download_resp.readall() == b'AAABBBCCC' - assert download_resp.properties.etag == put_block_list_resp.get('etag') - assert download_resp.properties.last_modified == put_block_list_resp.get('last_modified') - assert download_resp.properties['has_legal_hold'] - assert download_resp.properties['immutability_policy']['expiry_time'] is not None - assert download_resp.properties['immutability_policy']['policy_mode'] is not None + assert 
download_resp.readall() == b"AAABBBCCC" + assert download_resp.properties.etag == put_block_list_resp.get("etag") + assert download_resp.properties.last_modified == put_block_list_resp.get("last_modified") + assert download_resp.properties["has_legal_hold"] + assert download_resp.properties["immutability_policy"]["expiry_time"] is not None + assert download_resp.properties["immutability_policy"]["policy_mode"] is not None if self.is_live: blob.delete_immutability_policy() blob.set_legal_hold(False) blob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -633,17 +664,17 @@ def test_put_block_list_invalid_block_id(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act try: - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='4')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="4")] blob.commit_block_list(block_list) self.fail() except HttpResponseError as e: - assert str(e).find('specified block list is invalid') >= 0 + assert str(e).find("specified block list is invalid") >= 0 # Assert @@ -656,12 +687,12 @@ def test_put_block_list_with_md5(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list, validate_content=True) # Assert @@ -672,20 +703,18 @@ def test_put_block_list_with_blob_tier_specified(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - # Arrange self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - blob_client.stage_block('1', b'AAA') - blob_client.stage_block('2', b'BBB') - blob_client.stage_block('3', b'CCC') + blob_client.stage_block("1", b"AAA") + blob_client.stage_block("2", b"BBB") + blob_client.stage_block("3", b"CCC") blob_tier = StandardBlobTier.Cool # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - blob_client.commit_block_list(block_list, - standard_blob_tier=blob_tier) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + blob_client.commit_block_list(block_list, standard_blob_tier=blob_tier) # Assert blob_properties = blob_client.get_blob_properties() @@ -701,15 +730,14 @@ def test_put_block_list_with_blob_tier_specified_cold(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob_client = 
self.bsc.get_blob_client(self.container_name, blob_name) - blob_client.stage_block('1', b'AAA') - blob_client.stage_block('2', b'BBB') - blob_client.stage_block('3', b'CCC') + blob_client.stage_block("1", b"AAA") + blob_client.stage_block("2", b"BBB") + blob_client.stage_block("3", b"CCC") blob_tier = StandardBlobTier.Cold # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - blob_client.commit_block_list(block_list, - standard_blob_tier=blob_tier) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + blob_client.commit_block_list(block_list, standard_blob_tier=blob_tier) # Assert blob_properties = blob_client.get_blob_properties() @@ -727,8 +755,8 @@ def test_get_block_list_no_blocks(self, **kwargs): # Act with pytest.raises(ResourceModifiedError): - blob.get_block_list('all', if_tags_match_condition="\"condition tag\"='wrong tag'") - block_list = blob.get_block_list('all', if_tags_match_condition="\"tag1\"='firsttag'") + blob.get_block_list("all", if_tags_match_condition="\"condition tag\"='wrong tag'") + block_list = blob.get_block_list("all", if_tags_match_condition="\"tag1\"='firsttag'") # Assert assert block_list is not None @@ -744,23 +772,23 @@ def test_get_block_list_uncommitted_blocks(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") # Act - block_list = blob.get_block_list('uncommitted') + block_list = blob.get_block_list("uncommitted") # Assert assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 3 assert len(block_list[0]) == 0 - assert block_list[1][0].id == '1' + assert block_list[1][0].id == "1" assert block_list[1][0].size == 3 - assert block_list[1][1].id == '2' + assert block_list[1][1].id == "2" assert block_list[1][1].size == 3 - assert block_list[1][2].id == '3' + assert block_list[1][2].id == "3" assert block_list[1][2].size == 3 @BlobPreparer() @@ -772,26 +800,26 @@ def test_get_block_list_committed_blocks(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.stage_block('1', b'AAA') - blob.stage_block('2', b'BBB') - blob.stage_block('3', b'CCC') + blob.stage_block("1", b"AAA") + blob.stage_block("2", b"BBB") + blob.stage_block("3", b"CCC") - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] blob.commit_block_list(block_list) # Act - block_list = blob.get_block_list('committed') + block_list = blob.get_block_list("committed") # Assert assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 assert len(block_list[0]) == 3 - assert block_list[0][0].id == '1' + assert block_list[0][0].id == "1" assert block_list[0][0].size == 3 - assert block_list[0][1].id == '2' + assert block_list[0][1].id == "2" assert block_list[0][1].size == 3 - assert block_list[0][2].id == '3' + assert block_list[0][2].id == "3" assert block_list[0][2].size == 3 @BlobPreparer() @@ -803,8 +831,8 @@ def test_create_small_block_blob_with_no_overwrite(self, **kwargs): 
self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data1 = b'hello world' - data2 = b'hello second world' + data1 = b"hello world" + data2 = b"hello second world" # Act create_resp = blob.upload_blob(data1, overwrite=True) @@ -816,8 +844,8 @@ def test_create_small_block_blob_with_no_overwrite(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data1) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") assert props.blob_type == BlobType.BlockBlob @BlobPreparer() @@ -831,8 +859,8 @@ def test_upload_blob_content_md5(self, **kwargs): blob2_name = self._get_blob_reference(prefix="blob2") blob1 = self.bsc.get_blob_client(self.container_name, blob1_name) blob2 = self.bsc.get_blob_client(self.container_name, blob2_name) - data1 = b'hello world' - data2 = b'hello world this wont work' + data1 = b"hello world" + data2 = b"hello world this wont work" # Act blob1.upload_blob(data1, overwrite=True) @@ -856,8 +884,8 @@ def test_create_small_block_blob_with_overwrite(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data1 = b'hello world' - data2 = b'hello second world' + data1 = b"hello world" + data2 = b"hello second world" # Act create_resp = blob.upload_blob(data1, overwrite=True) @@ -867,8 +895,8 @@ def test_create_small_block_blob_with_overwrite(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data2) - assert props.etag == update_resp.get('etag') - assert props.last_modified == update_resp.get('last_modified') + assert props.etag == update_resp.get("etag") + assert props.last_modified == update_resp.get("last_modified") assert props.blob_type == BlobType.BlockBlob @BlobPreparer() @@ -884,19 +912,19 @@ def test_create_large_block_blob_with_no_overwrite(self, **kwargs): data2 = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - create_resp = blob.upload_blob(data1, overwrite=True, metadata={'blobdata': 'data1'}) + create_resp = blob.upload_blob(data1, overwrite=True, metadata={"blobdata": "data1"}) with pytest.raises(ResourceExistsError): - blob.upload_blob(data2, overwrite=False, metadata={'blobdata': 'data2'}) + blob.upload_blob(data2, overwrite=False, metadata={"blobdata": "data2"}) props = blob.get_blob_properties() # Assert self.assertBlobEqual(self.container_name, blob_name, data1) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") assert props.blob_type == BlobType.BlockBlob - assert props.metadata == {'blobdata': 'data1'} + assert props.metadata == {"blobdata": "data1"} assert props.size == LARGE_BLOB_SIZE @BlobPreparer() @@ -912,17 +940,17 @@ def test_create_large_block_blob_with_overwrite(self, **kwargs): data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act - create_resp = blob.upload_blob(data1, overwrite=True, metadata={'blobdata': 'data1'}) - update_resp = blob.upload_blob(data2, overwrite=True, metadata={'blobdata': 'data2'}) + create_resp = blob.upload_blob(data1, overwrite=True, metadata={"blobdata": "data1"}) + update_resp = blob.upload_blob(data2, 
overwrite=True, metadata={"blobdata": "data2"}) props = blob.get_blob_properties() # Assert self.assertBlobEqual(self.container_name, blob_name, data2) - assert props.etag == update_resp.get('etag') - assert props.last_modified == update_resp.get('last_modified') + assert props.etag == update_resp.get("etag") + assert props.last_modified == update_resp.get("last_modified") assert props.blob_type == BlobType.BlockBlob - assert props.metadata == {'blobdata': 'data2'} + assert props.metadata == {"blobdata": "data2"} assert props.size == LARGE_BLOB_SIZE + 512 @BlobPreparer() @@ -934,7 +962,7 @@ def test_create_blob_from_bytes_single_put(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'hello world' + data = b"hello world" # Act create_resp = blob.upload_blob(data) @@ -942,8 +970,8 @@ def test_create_blob_from_bytes_single_put(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -954,7 +982,7 @@ def test_create_blob_from_0_bytes(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'' + data = b"" # Act create_resp = blob.upload_blob(data) @@ -962,8 +990,8 @@ def test_create_blob_from_0_bytes(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -974,7 +1002,7 @@ def test_create_from_bytes_blob_unicode(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = u'hello world' + data = "hello world" # Act create_resp = blob.upload_blob(data) @@ -982,8 +1010,8 @@ def test_create_from_bytes_blob_unicode(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -996,14 +1024,14 @@ def test_create_from_bytes_blob_unicode(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - data = u'hello world' + data = "hello world" create_resp = blob.upload_blob(data) props = blob.get_blob_properties() # Assert - self.assertBlobEqual(self.container_name, blob_name, data.encode('utf-8')) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + self.assertBlobEqual(self.container_name, blob_name, data.encode("utf-8")) + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1014,7 +1042,7 @@ def test_create_from_bytes_blob_with_lease_id(self, **kwargs): self._setup(storage_account_name, 
storage_account_key) blob = self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act create_resp = blob.upload_blob(data, lease=lease) @@ -1022,8 +1050,8 @@ def test_create_from_bytes_blob_with_lease_id(self, **kwargs): # Assert output = blob.download_blob(lease=lease) assert output.readall() == data - assert output.properties.etag == create_resp.get('etag') - assert output.properties.last_modified == create_resp.get('last_modified') + assert output.properties.etag == create_resp.get("etag") + assert output.properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1035,7 +1063,7 @@ def test_create_blob_from_bytes_with_metadata(self, **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = self.get_random_bytes(LARGE_BLOB_SIZE) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} # Act blob.upload_blob(data, metadata=metadata) @@ -1056,9 +1084,7 @@ def test_create_blob_from_bytes_with_properties(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings=ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") blob.upload_blob(data, content_settings=content_settings) # Assert @@ -1080,9 +1106,10 @@ def test_create_blob_from_bytes_with_progress(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1092,8 +1119,8 @@ def callback(response): # Assert self.assertBlobEqual(self.container_name, blob_name, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1141,9 +1168,7 @@ def test_create_blob_from_bytes_with_index_and_count_and_properties(self, **kwar data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings=ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") blob.upload_blob(data[3:], length=5, content_settings=content_settings) # Assert @@ -1179,7 +1204,7 @@ def test_create_blob_from_bytes_with_blob_tier_specified(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'hello world' + data = b"hello world" blob_tier = StandardBlobTier.Cool # Act @@ -1209,8 +1234,8 @@ def test_create_blob_from_path(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() 
@recorded_by_proxy @@ -1232,8 +1257,8 @@ def test_create_blob_from_path_non_parallel(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1270,9 +1295,10 @@ def test_create_blob_from_path_with_progress(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1297,9 +1323,7 @@ def test_create_blob_from_path_with_properties(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings=ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -1331,8 +1355,8 @@ def test_create_blob_from_stream_chunked_upload(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1390,9 +1414,10 @@ def test_create_blob_from_stream_with_progress_chunked_upload(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1438,9 +1463,7 @@ def test_create_from_stream_chunk_upload_with_cntandrops(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings=ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") blob_size = len(data) - 301 with tempfile.TemporaryFile() as temp_file: temp_file.write(data) @@ -1465,9 +1488,7 @@ def test_create_blob_from_stream_chunked_upload_with_properties(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings=ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -1494,13 +1515,13 @@ def test_create_blob_from_stream_chunked_upload_with_properties_parallel(self, * blob_tier = StandardBlobTier.Cool # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) - blob.upload_blob(temp_file, content_settings=content_settings, max_concurrency=2, standard_blob_tier=blob_tier) + blob.upload_blob( + temp_file, content_settings=content_settings, max_concurrency=2, standard_blob_tier=blob_tier + ) 
properties = blob.get_blob_properties() @@ -1516,8 +1537,8 @@ def test_create_blob_from_text(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-8') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-8") # Act create_resp = blob.upload_blob(text) @@ -1525,8 +1546,8 @@ def test_create_blob_from_text(self, **kwargs): # Assert self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -1537,11 +1558,11 @@ def test_create_blob_from_text_with_encoding(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act - blob.upload_blob(text, encoding='utf-16') + blob.upload_blob(text, encoding="utf-16") # Assert self.assertBlobEqual(self.container_name, blob_name, data) @@ -1555,18 +1576,19 @@ def test_create_blob_from_text_with_encoding_and_progress(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - text = u'hello 啊齄丂狛狜 world' - data = text.encode('utf-16') + text = "hello 啊齄丂狛狜 world" + data = text.encode("utf-16") # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) - blob.upload_blob(text, encoding='utf-16', raw_response_hook=callback) + blob.upload_blob(text, encoding="utf-16", raw_response_hook=callback) # Assert self.assertBlobEqual(self.container_name, blob_name, data) @@ -1582,7 +1604,7 @@ def test_create_blob_from_text_chunked_upload(self, **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = self.get_random_text_data(LARGE_BLOB_SIZE) - encoded_data = data.encode('utf-8') + encoded_data = data.encode("utf-8") # Act blob.upload_blob(data) @@ -1602,7 +1624,7 @@ def test_create_blob_with_md5(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'hello world' + data = b"hello world" # Act blob.upload_blob(data, validate_content=True) @@ -1635,22 +1657,25 @@ def test_upload_progress_single_put(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), len(data)) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, - credential=storage_account_key) + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, + credential=storage_account_key, + ) blob_client.upload_blob( data, blob_type=BlobType.BlockBlob, overwrite=True, 
max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1663,23 +1688,27 @@ def test_upload_progress_chunked_non_parallel(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) blob_client.upload_blob( data, blob_type=BlobType.BlockBlob, overwrite=True, max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1693,23 +1722,27 @@ def test_upload_progress_chunked_parallel(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) blob_client.upload_blob( data, blob_type=BlobType.BlockBlob, overwrite=True, max_concurrency=3, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1723,24 +1756,28 @@ def test_upload_progress_unknown_size(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) stream = NonSeekableStream(BytesIO(data)) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) blob_client.upload_blob( data=stream, blob_type=BlobType.BlockBlob, overwrite=True, max_concurrency=3, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1775,10 +1812,11 @@ def test_copy_blob_with_cold_tier(self, **kwargs): self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") blob_tier = StandardBlobTier.Cold copyblob.start_copy_from_url(sourceblob, standard_blob_tier=blob_tier) @@ -1805,4 +1843,5 @@ def test_set_blob_tier_cold_tier(self, **kwargs): # Assert assert props.blob_tier == StandardBlobTier.Cold -#------------------------------------------------------------------------------ + +# 
------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py index 89856f1577ec..a10f5943c044 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py @@ -17,8 +17,11 @@ BlobBlock, StandardBlobTier, generate_blob_sas, - BlobSasPermissions, CustomerProvidedEncryptionKey, - BlobImmutabilityPolicyMode, ImmutabilityPolicy) + BlobSasPermissions, + CustomerProvidedEncryptionKey, + BlobImmutabilityPolicyMode, + ImmutabilityPolicy, +) from azure.storage.blob.aio import BlobClient, BlobServiceClient from azure.storage.blob._shared.policies import StorageContentValidation @@ -29,7 +32,7 @@ from test_helpers_async import NonSeekableStream, ProgressTracker # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" LARGE_BLOB_SIZE = 5 * 1024 + 5 TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) # ------------------------------------------------------------------------------ @@ -37,17 +40,18 @@ class TestStorageBlockBlobAsync(AsyncStorageRecordedTestCase): # --Helpers----------------------------------------------------------------- - async def _setup(self, storage_account_name, key, container_name='utcontainer'): + async def _setup(self, storage_account_name, key, container_name="utcontainer"): # test chunking functionality by reducing the size of each chunk, # otherwise the tests would take too long to execute self.bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=key, max_single_put_size=1024, - max_block_size=1024) + max_block_size=1024, + ) self.config = self.bsc._config self.container_name = self.get_resource_name(container_name) - self.source_container_name = self.get_resource_name('utcontainersource1') + self.source_container_name = self.get_resource_name("utcontainersource1") if self.is_live: try: @@ -63,7 +67,7 @@ def _get_blob_reference(self, prefix=TEST_BLOB_PREFIX): return self.get_resource_name(prefix) def _get_blob_with_special_chars_reference(self): - return 'भारत¥test/testsubÐirÍ/' + self.get_resource_name('srcÆblob') + return "भारत¥test/testsubÐirÍ/" + self.get_resource_name("srcÆblob") async def _create_source_blob_url_with_special_chars(self, tags=None): blob_name = self._get_blob_with_special_chars_reference() @@ -81,15 +85,16 @@ async def _create_source_blob_url_with_special_chars(self, tags=None): ) return BlobClient.from_blob_url(blob.url, credential=sas_token_for_special_chars).url - async def _create_blob(self, tags=None, data=b'', **kwargs): + async def _create_blob(self, tags=None, data=b"", **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(data, tags=tags, **kwargs) return blob async def _create_source_blob(self, data): - blob_client = self.bsc.get_blob_client(self.source_container_name, - self.get_resource_name(TEST_BLOB_PREFIX + "1")) + blob_client = self.bsc.get_blob_client( + self.source_container_name, self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) await blob_client.upload_blob(data, overwrite=True) return blob_client @@ -111,15 +116,18 @@ async def test_upload_blob_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await 
self._create_source_blob(data=source_blob_data) destination_blob_client = await self._create_blob() - access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token("https://storage.azure.com/.default") + access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token( + "https://storage.azure.com/.default" + ) token = "Bearer {}".format(access_token.token) # Assert this operation fails without a credential with pytest.raises(HttpResponseError): await destination_blob_client.upload_blob_from_url(source_blob_client.url) # Assert it passes after passing an oauth credential - await destination_blob_client.upload_blob_from_url(source_blob_client.url, source_authorization=token, - overwrite=True) + await destination_blob_client.upload_blob_from_url( + source_blob_client.url, source_authorization=token, overwrite=True + ) destination_blob = await destination_blob_client.download_blob() destination_blob_data = await destination_blob.readall() assert source_blob_data == destination_blob_data @@ -140,14 +148,15 @@ async def test_upload_blob_with_and_without_overwrite(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas ) - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - await new_blob_client.upload_blob(b'destination blob data') + await new_blob_client.upload_blob(b"destination blob data") # Assert with pytest.raises(ResourceExistsError): await new_blob_client.upload_blob_from_url(source_blob, overwrite=False) @@ -155,7 +164,7 @@ async def test_upload_blob_with_and_without_overwrite(self, **kwargs): assert new_blob is not None new_blob_download = await new_blob_client.download_blob() new_blob_content = await new_blob_download.readall() - assert new_blob_content == b'source blob data' + assert new_blob_content == b"source blob data" @BlobPreparer() @recorded_by_proxy_async @@ -173,10 +182,11 @@ async def test_upload_blob_from_url_with_existing_blob(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas ) - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) @@ -185,7 +195,7 @@ async def test_upload_blob_from_url_with_existing_blob(self, **kwargs): assert new_blob is not None downloaded_blob = await new_blob_client.download_blob() new_blob_content = await downloaded_blob.readall() - assert new_blob_content == b'test data' + assert new_blob_content == b"test data" @BlobPreparer() @recorded_by_proxy_async @@ -204,11 +214,12 @@ async def test_upload_blob_from_url_with_standard_tier_specified(self, **kwargs) container_name=self.container_name, blob_name=blob.blob_name, 
permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # Act - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas + ) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -237,11 +248,12 @@ async def test_upload_blob_from_url_with_cold_tier_specified(self, **kwargs): container_name=self.container_name, blob_name=blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) # Act - source_blob = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas) + source_blob = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob.blob_name, sas + ) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -268,21 +280,22 @@ async def test_upload_blob_with_destination_lease(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) await new_blob_client.upload_blob(data="test") - new_blob_lease = await new_blob_client.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + new_blob_lease = await new_blob_client.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") with pytest.raises(HttpResponseError): await new_blob_client.upload_blob_from_url( - source_blob_url, destination_lease="baddde9e-8247-4276-8bfa-c7a8081eba1d", overwrite=True) + source_blob_url, destination_lease="baddde9e-8247-4276-8bfa-c7a8081eba1d", overwrite=True + ) with pytest.raises(HttpResponseError): await new_blob_client.upload_blob_from_url(source_blob_url) - await new_blob_client.upload_blob_from_url( - source_blob_url, destination_lease=new_blob_lease) + await new_blob_client.upload_blob_from_url(source_blob_url, destination_lease=new_blob_lease) @BlobPreparer() @recorded_by_proxy_async @@ -295,9 +308,11 @@ async def test_upload_blob_from_url_if_match_condition(self, **kwargs): await self._setup(storage_account_name, storage_account_key) source_blob = await self._create_blob() early_test_datetime = self.get_datetime_variable( - variables, "early_test_dt", (datetime.utcnow() - timedelta(minutes=15))) + variables, "early_test_dt", (datetime.utcnow() - timedelta(minutes=15)) + ) late_test_datetime = self.get_datetime_variable( - variables, "late_test_dt", (datetime.utcnow() + timedelta(minutes=15))) + variables, "late_test_dt", (datetime.utcnow() + timedelta(minutes=15)) + ) sas = self.generate_sas( generate_blob_sas, account_name=storage_account_name, @@ -305,10 +320,11 @@ async def 
test_upload_blob_from_url_if_match_condition(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_client = self.bsc.get_blob_client(self.container_name, blob_name) await new_blob_client.upload_blob(data="fake data") @@ -316,24 +332,32 @@ async def test_upload_blob_from_url_if_match_condition(self, **kwargs): # Assert with pytest.raises(ResourceModifiedError): await new_blob_client.upload_blob_from_url( - source_blob_url, if_modified_since=late_test_datetime, overwrite=True) + source_blob_url, if_modified_since=late_test_datetime, overwrite=True + ) await new_blob_client.upload_blob_from_url( - source_blob_url, if_modified_since=early_test_datetime, overwrite=True) + source_blob_url, if_modified_since=early_test_datetime, overwrite=True + ) with pytest.raises(ResourceModifiedError): await new_blob_client.upload_blob_from_url( - source_blob_url, if_unmodified_since=early_test_datetime, overwrite=True) + source_blob_url, if_unmodified_since=early_test_datetime, overwrite=True + ) await new_blob_client.upload_blob_from_url( - source_blob_url, if_unmodified_since=late_test_datetime, overwrite=True) + source_blob_url, if_unmodified_since=late_test_datetime, overwrite=True + ) with pytest.raises(ResourceNotFoundError): await new_blob_client.upload_blob_from_url( - source_blob_url, source_if_modified_since=late_test_datetime, overwrite=True) + source_blob_url, source_if_modified_since=late_test_datetime, overwrite=True + ) await new_blob_client.upload_blob_from_url( - source_blob_url, source_if_modified_since=early_test_datetime, overwrite=True) + source_blob_url, source_if_modified_since=early_test_datetime, overwrite=True + ) with pytest.raises(ResourceNotFoundError): await new_blob_client.upload_blob_from_url( - source_blob_url, source_if_unmodified_since=early_test_datetime, overwrite=True) + source_blob_url, source_if_unmodified_since=early_test_datetime, overwrite=True + ) await new_blob_client.upload_blob_from_url( - source_blob_url, source_if_unmodified_since=late_test_datetime, overwrite=True) + source_blob_url, source_if_unmodified_since=late_test_datetime, overwrite=True + ) return variables @@ -354,14 +378,16 @@ async def test_upload_blob_from_url_with_cpk(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) await new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True, cpk=TEST_ENCRYPTION_KEY) + source_blob_url, include_source_blob_properties=True, cpk=TEST_ENCRYPTION_KEY 
+ ) # Assert with pytest.raises(HttpResponseError): @@ -377,15 +403,16 @@ async def test_upload_blob_from_url_overwrite_properties(self, **kwargs): # Act await self._setup(storage_account_name, storage_account_key) - source_blob_content_settings = ContentSettings(content_language='spanish') - new_blob_content_settings = ContentSettings(content_language='english') + source_blob_content_settings = ContentSettings(content_language="spanish") + new_blob_content_settings = ContentSettings(content_language="english") source_blob_tags = {"tag1": "sourcetag", "tag2": "secondsourcetag"} new_blob_tags = {"tag1": "copytag"} source_blob = await self._create_blob( data=b"This is test data to be copied over.", tags=source_blob_tags, - content_settings=source_blob_content_settings) + content_settings=source_blob_content_settings, + ) sas = self.generate_sas( generate_blob_sas, account_name=storage_account_name, @@ -393,18 +420,21 @@ async def test_upload_blob_from_url_overwrite_properties(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) - await new_blob.upload_blob_from_url(source_blob_url, - include_source_blob_properties=True, - tags=new_blob_tags, - content_settings=new_blob_content_settings, - cpk=TEST_ENCRYPTION_KEY) + await new_blob.upload_blob_from_url( + source_blob_url, + include_source_blob_properties=True, + tags=new_blob_tags, + content_settings=new_blob_content_settings, + cpk=TEST_ENCRYPTION_KEY, + ) new_blob_props = await new_blob.get_blob_properties(cpk=TEST_ENCRYPTION_KEY) # Assert that source blob properties did not take precedence. 
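For orientation, the `upload_blob_from_url` tests above and below all exercise one short client flow. The following is a minimal sketch, not code from this PR: the account name, key, container names, and blob names are placeholder assumptions.

    # Minimal sketch of the copy-from-URL pattern these tests exercise.
    # Assumption: account name/key and all container/blob names are placeholders.
    from datetime import datetime, timedelta

    from azure.storage.blob import (
        BlobSasPermissions,
        BlobServiceClient,
        ContentSettings,
        generate_blob_sas,
    )

    account_name = "myaccount"     # placeholder
    account_key = "<account-key>"  # placeholder
    bsc = BlobServiceClient(
        f"https://{account_name}.blob.core.windows.net", credential=account_key
    )

    source = bsc.get_blob_client("sourcecontainer", "sourceblob")
    source.upload_blob(b"data to copy", overwrite=True)

    # The service pulls the source directly, so the source URL needs read
    # access -- here granted through a short-lived read-only SAS.
    sas = generate_blob_sas(
        account_name=account_name,
        account_key=account_key,
        container_name="sourcecontainer",
        blob_name="sourceblob",
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )

    dest = bsc.get_blob_client("destcontainer", "destblob")
    dest.upload_blob_from_url(
        f"{source.url}?{sas}",
        overwrite=True,
        # False keeps the destination's own settings/tags instead of the source's.
        include_source_blob_properties=False,
        content_settings=ContentSettings(content_language="english"),
        tags={"tag1": "copytag"},
    )

The tests additionally thread cpk=, source_content_md5=, destination_lease=, and the if_modified_since/source_if_modified_since family through the same call; each is just another keyword argument on upload_blob_from_url.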
@@ -430,19 +460,22 @@ async def test_upload_blob_from_url_with_source_content_md5(self, **kwargs): container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert await new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True, source_content_md5=source_md5) + source_blob_url, include_source_blob_properties=True, source_content_md5=source_md5 + ) with pytest.raises(HttpResponseError): await new_blob.upload_blob_from_url( - source_blob_url, include_source_blob_properties=False, source_content_md5=bad_source_md5) + source_blob_url, include_source_blob_properties=False, source_content_md5=bad_source_md5 + ) new_blob_props = await new_blob.get_blob_properties() new_blob_content_md5 = new_blob_props.content_settings.content_md5 assert new_blob_content_md5 == source_md5 @@ -456,17 +489,15 @@ async def test_upload_blob_from_url_source_and_destination_properties(self, **kw # Act await self._setup(storage_account_name, storage_account_key) content_settings = ContentSettings( - content_type='application/octet-stream', - content_language='spanish', - content_disposition='inline' + content_type="application/octet-stream", content_language="spanish", content_disposition="inline" ) source_blob = await self._create_blob( data=b"This is test data to be copied over.", tags={"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}, content_settings=content_settings, - standard_blob_tier=StandardBlobTier.Cool + standard_blob_tier=StandardBlobTier.Cool, ) - await source_blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + await source_blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") source_blob_props = await source_blob.get_blob_properties() sas = self.generate_sas( generate_blob_sas, @@ -475,35 +506,38 @@ async def test_upload_blob_from_url_source_and_destination_properties(self, **kw container_name=self.container_name, blob_name=source_blob.blob_name, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), + ) + source_blob_url = "{0}/{1}/{2}?{3}".format( + self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas ) - source_blob_url = '{0}/{1}/{2}?{3}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, source_blob.blob_name, sas) blob_name = self.get_resource_name("blobcopy") new_blob_copy1 = self.bsc.get_blob_client(self.container_name, blob_name) - new_blob_copy2 = self.bsc.get_blob_client(self.container_name, 'blob2copy') - await new_blob_copy1.upload_blob_from_url( - source_blob_url, include_source_blob_properties=True) - await new_blob_copy2.upload_blob_from_url( - source_blob_url, include_source_blob_properties=False) + new_blob_copy2 = self.bsc.get_blob_client(self.container_name, "blob2copy") + await new_blob_copy1.upload_blob_from_url(source_blob_url, include_source_blob_properties=True) + await 
new_blob_copy2.upload_blob_from_url(source_blob_url, include_source_blob_properties=False) new_blob_copy1_props = await new_blob_copy1.get_blob_properties() new_blob_copy2_props = await new_blob_copy2.get_blob_properties() # Assert - assert new_blob_copy1_props.content_settings.content_language == \ - source_blob_props.content_settings.content_language - assert new_blob_copy2_props.content_settings.content_language != \ - source_blob_props.content_settings.content_language + assert ( + new_blob_copy1_props.content_settings.content_language + == source_blob_props.content_settings.content_language + ) + assert ( + new_blob_copy2_props.content_settings.content_language + != source_blob_props.content_settings.content_language + ) - assert source_blob_props.lease.status == 'locked' - assert new_blob_copy1_props.lease.status == 'unlocked' - assert new_blob_copy2_props.lease.status == 'unlocked' + assert source_blob_props.lease.status == "locked" + assert new_blob_copy1_props.lease.status == "unlocked" + assert new_blob_copy2_props.lease.status == "unlocked" - assert source_blob_props.blob_tier == 'Cool' - assert new_blob_copy1_props.blob_tier == 'Hot' - assert new_blob_copy2_props.blob_tier == 'Hot' + assert source_blob_props.blob_tier == "Cool" + assert new_blob_copy1_props.blob_tier == "Hot" + assert new_blob_copy2_props.blob_tier == "Hot" assert source_blob_props.tag_count == 3 assert new_blob_copy1_props.tag_count is None @@ -521,8 +555,8 @@ async def test_put_block(self, **kwargs): # Act for i in range(5): - headers = await blob.stage_block(i, 'block {0}'.format(i).encode('utf-8')) - assert 'content_crc64' in headers + headers = await blob.stage_block(i, "block {0}".format(i).encode("utf-8")) + assert "content_crc64" in headers # Assert @@ -541,8 +575,8 @@ async def test_copy_blob(self, **kwargs): # Assert assert copy_props is not None - assert copy_props['copy_id'] is not None - assert 'success' == copy_props['copy_status'] + assert copy_props["copy_id"] is not None + assert "success" == copy_props["copy_status"] @BlobPreparer() @recorded_by_proxy_async @@ -556,23 +590,19 @@ async def test_put_block_from_url_and_commit(self, **kwargs): split = 4 * 1024 # Act part 1: make put block from url calls await dest_blob.stage_block_from_url( - block_id=1, - source_url=source_blob_url, - source_offset=0, - source_length=split) + block_id=1, source_url=source_blob_url, source_offset=0, source_length=split + ) await dest_blob.stage_block_from_url( - block_id=2, - source_url=source_blob_url, - source_offset=split, - source_length=split) + block_id=2, source_url=source_blob_url, source_offset=split, source_length=split + ) # Assert blocks - committed, uncommitted = await dest_blob.get_block_list('all') + committed, uncommitted = await dest_blob.get_block_list("all") assert len(uncommitted) == 2 assert len(committed) == 0 # Act part 2: commit the blocks - await dest_blob.commit_block_list(['1', '2']) - committed, uncommitted = await dest_blob.get_block_list('all') + await dest_blob.commit_block_list(["1", "2"]) + committed, uncommitted = await dest_blob.get_block_list("all") assert len(uncommitted) == 0 assert len(committed) == 2 @@ -591,11 +621,11 @@ def return_response(resp, _, headers): blob = await self._create_blob() # Act - resp, headers = await blob.stage_block(0, 'block 0', cls=return_response) + resp, headers = await blob.stage_block(0, "block 0", cls=return_response) # Assert assert 201 == resp.http_response.status_code - assert 'x-ms-content-crc64' in headers + assert "x-ms-content-crc64" in 
headers @BlobPreparer() @recorded_by_proxy_async @@ -608,8 +638,8 @@ async def test_put_block_unicode(self, **kwargs): blob = await self._create_blob() # Act - headers = await blob.stage_block('1', u'啊齄丂狛狜') - assert 'content_crc64' in headers + headers = await blob.stage_block("1", "啊齄丂狛狜") + assert "content_crc64" in headers # Assert @@ -624,7 +654,7 @@ async def test_put_block_with_md5(self, **kwargs): blob = await self._create_blob() # Act - await blob.stage_block(1, b'block', validate_content=True) + await blob.stage_block(1, b"block", validate_content=True) # Assert @@ -638,20 +668,20 @@ async def test_put_block_list(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.stage_block('1', b'AAA') - await blob.stage_block('2', b'BBB') - await blob.stage_block('3', b'CCC') + await blob.stage_block("1", b"AAA") + await blob.stage_block("2", b"BBB") + await blob.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = await blob.commit_block_list(block_list) # Assert content = await blob.download_blob() actual = await content.readall() - assert actual == b'AAABBBCCC' - assert content.properties.etag == put_block_list_resp.get('etag') - assert content.properties.last_modified == put_block_list_resp.get('last_modified') + assert actual == b"AAABBBCCC" + assert content.properties.etag == put_block_list_resp.get("etag") + assert content.properties.last_modified == put_block_list_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -662,50 +692,55 @@ async def test_put_block_with_immutability_policy(self, **kwargs): variables = kwargs.pop("variables", {}) await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, - container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(container_name, blob_name) - await blob.stage_block('1', b'AAA') - await blob.stage_block('2', b'BBB') - await blob.stage_block('3', b'CCC') + await blob.stage_block("1", b"AAA") + await blob.stage_block("2", b"BBB") + await blob.stage_block("3", b"CCC") # Act expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - immutability_policy = 
-                                                 policy_mode=BlobImmutabilityPolicyMode.Unlocked)
-        put_block_list_resp = await blob.commit_block_list(block_list,
-                                                           immutability_policy=immutability_policy,
-                                                           legal_hold=True,
-                                                           )
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
+        immutability_policy = ImmutabilityPolicy(
+            expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked
+        )
+        put_block_list_resp = await blob.commit_block_list(
+            block_list,
+            immutability_policy=immutability_policy,
+            legal_hold=True,
+        )

         # Assert
         download_resp = await blob.download_blob()
         content = await download_resp.readall()
-        assert content == b'AAABBBCCC'
-        assert download_resp.properties.etag == put_block_list_resp.get('etag')
-        assert download_resp.properties.last_modified == put_block_list_resp.get('last_modified')
-        assert download_resp.properties['has_legal_hold']
-        assert download_resp.properties['immutability_policy']['expiry_time'] is not None
-        assert download_resp.properties['immutability_policy']['policy_mode'] is not None
+        assert content == b"AAABBBCCC"
+        assert download_resp.properties.etag == put_block_list_resp.get("etag")
+        assert download_resp.properties.last_modified == put_block_list_resp.get("last_modified")
+        assert download_resp.properties["has_legal_hold"]
+        assert download_resp.properties["immutability_policy"]["expiry_time"] is not None
+        assert download_resp.properties["immutability_policy"]["policy_mode"] is not None

         if self.is_live:
             await blob.delete_immutability_policy()
             await blob.set_legal_hold(False)
             await blob.delete_blob()
-            await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name,
-                                                     container_name)
+            await mgmt_client.blob_containers.delete(
+                storage_resource_group_name, versioned_storage_account_name, container_name
+            )

         return variables

@@ -719,17 +754,17 @@ async def test_put_block_list_invalid_block_id(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob.stage_block('1', b'AAA')
-        await blob.stage_block('2', b'BBB')
-        await blob.stage_block('3', b'CCC')
+        await blob.stage_block("1", b"AAA")
+        await blob.stage_block("2", b"BBB")
+        await blob.stage_block("3", b"CCC")

         # Act
         try:
-            block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='4')]
+            block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="4")]
             await blob.commit_block_list(block_list)
             self.fail()
         except HttpResponseError as e:
-            assert str(e).find('specified block list is invalid') >= 0
+            assert str(e).find("specified block list is invalid") >= 0

         # Assert

@@ -743,12 +778,12 @@ async def test_put_block_list_with_md5(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob.stage_block('1', b'AAA')
-        await blob.stage_block('2', b'BBB')
-        await blob.stage_block('3', b'CCC')
+        await blob.stage_block("1", b"AAA")
+        await blob.stage_block("2", b"BBB")
+        await blob.stage_block("3", b"CCC")

         # Act
-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
         await blob.commit_block_list(block_list, validate_content=True)

         # Assert

@@ -760,15 +795,14 @@ async def test_put_block_list_with_blob_tier_specified(self, storage_account_nam
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob_client.stage_block('1', b'AAA')
-        await blob_client.stage_block('2', b'BBB')
-        await blob_client.stage_block('3', b'CCC')
+        await blob_client.stage_block("1", b"AAA")
+        await blob_client.stage_block("2", b"BBB")
+        await blob_client.stage_block("3", b"CCC")
         blob_tier = StandardBlobTier.Cool

         # Act
-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
-        await blob_client.commit_block_list(block_list,
-                                            standard_blob_tier=blob_tier)
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
+        await blob_client.commit_block_list(block_list, standard_blob_tier=blob_tier)

         # Assert
         blob_properties = await blob_client.get_blob_properties()
@@ -781,15 +815,14 @@ async def test_put_block_list_with_blob_tier_specified_cold(self, storage_accoun
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob_client.stage_block('1', b'AAA')
-        await blob_client.stage_block('2', b'BBB')
-        await blob_client.stage_block('3', b'CCC')
+        await blob_client.stage_block("1", b"AAA")
+        await blob_client.stage_block("2", b"BBB")
+        await blob_client.stage_block("3", b"CCC")
         blob_tier = StandardBlobTier.Cold

         # Act
-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
-        await blob_client.commit_block_list(block_list,
-                                            standard_blob_tier=blob_tier)
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
+        await blob_client.commit_block_list(block_list, standard_blob_tier=blob_tier)

         # Assert
         blob_properties = await blob_client.get_blob_properties()
@@ -808,8 +841,8 @@ async def test_get_block_list_no_blocks(self, **kwargs):

         # Act
         with pytest.raises(ResourceModifiedError):
-            await blob.get_block_list('all', if_tags_match_condition="\"condition tag\"='wrong tag'")
-        block_list = await blob.get_block_list('all', if_tags_match_condition="\"tag1\"='firsttag'")
+            await blob.get_block_list("all", if_tags_match_condition="\"condition tag\"='wrong tag'")
+        block_list = await blob.get_block_list("all", if_tags_match_condition="\"tag1\"='firsttag'")

         # Assert
         assert block_list is not None
@@ -826,23 +859,23 @@ async def test_get_block_list_uncommitted_blocks(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob.stage_block('1', b'AAA')
-        await blob.stage_block('2', b'BBB')
-        await blob.stage_block('3', b'CCC')
+        await blob.stage_block("1", b"AAA")
+        await blob.stage_block("2", b"BBB")
+        await blob.stage_block("3", b"CCC")

         # Act
-        block_list = await blob.get_block_list('uncommitted')
+        block_list = await blob.get_block_list("uncommitted")

         # Assert
         assert block_list is not None
         assert len(block_list) == 2
         assert len(block_list[1]) == 3
         assert len(block_list[0]) == 0
-        assert block_list[1][0].id == '1'
+        assert block_list[1][0].id == "1"
         assert block_list[1][0].size == 3
-        assert block_list[1][1].id == '2'
+        assert block_list[1][1].id == "2"
         assert block_list[1][1].size == 3
-        assert block_list[1][2].id == '3'
+        assert block_list[1][2].id == "3"
         assert block_list[1][2].size == 3
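The index-heavy assertions above rely on get_block_list returning a two-item tuple of committed and uncommitted BlobBlock lists; a minimal fragment, assuming `blob` is an already-constructed azure.storage.blob.aio.BlobClient like the one in these tests:

# Sketch only: runs inside a coroutine; `blob` has blocks staged as above.
committed, uncommitted = await blob.get_block_list("all")
for block in committed + uncommitted:
    print(block.id, block.size)  # each entry is a BlobBlock carrying its id and size in bytes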

     @BlobPreparer()
@@ -855,26 +888,26 @@ async def test_get_block_list_committed_blocks(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob.stage_block('1', b'AAA')
-        await blob.stage_block('2', b'BBB')
-        await blob.stage_block('3', b'CCC')
+        await blob.stage_block("1", b"AAA")
+        await blob.stage_block("2", b"BBB")
+        await blob.stage_block("3", b"CCC")

-        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
+        block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")]
         await blob.commit_block_list(block_list)

         # Act
-        block_list = await blob.get_block_list('committed')
+        block_list = await blob.get_block_list("committed")

         # Assert
         assert block_list is not None
         assert len(block_list) == 2
         assert len(block_list[1]) == 0
         assert len(block_list[0]) == 3
-        assert block_list[0][0].id == '1'
+        assert block_list[0][0].id == "1"
         assert block_list[0][0].size == 3
-        assert block_list[0][1].id == '2'
+        assert block_list[0][1].id == "2"
         assert block_list[0][1].size == 3
-        assert block_list[0][2].id == '3'
+        assert block_list[0][2].id == "3"
         assert block_list[0][2].size == 3

     @BlobPreparer()
@@ -888,8 +921,8 @@ async def test_upload_blob_content_md5(self, **kwargs):
         blob2_name = self._get_blob_reference(prefix="blob2")
         blob1 = self.bsc.get_blob_client(self.container_name, blob1_name)
         blob2 = self.bsc.get_blob_client(self.container_name, blob2_name)
-        data1 = b'hello world'
-        data2 = b'hello world this wont work'
+        data1 = b"hello world"
+        data2 = b"hello world this wont work"

         # Act
         await blob1.upload_blob(data1, overwrite=True)
@@ -916,8 +949,8 @@ async def test_create_small_block_blob_with_no_overwrite(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        data1 = b'hello world'
-        data2 = b'hello second world'
+        data1 = b"hello world"
+        data2 = b"hello second world"

         # Act
         create_resp = await blob.upload_blob(data1, overwrite=True)
@@ -929,8 +962,8 @@ async def test_create_small_block_blob_with_no_overwrite(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data1)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
         assert props.blob_type == BlobType.BlockBlob

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -943,8 +976,8 @@ async def test_create_small_block_blob_with_overwrite(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        data1 = b'hello world'
-        data2 = b'hello second world'
+        data1 = b"hello world"
+        data2 = b"hello second world"

         # Act
         create_resp = await blob.upload_blob(data1, overwrite=True)
@@ -954,8 +987,8 @@ async def test_create_small_block_blob_with_overwrite(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data2)
-        assert props.etag == update_resp.get('etag')
-        assert props.last_modified == update_resp.get('last_modified')
+        assert props.etag == update_resp.get("etag")
+        assert props.last_modified == update_resp.get("last_modified")
         assert props.blob_type == BlobType.BlockBlob

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -972,19 +1005,19 @@ async def test_create_large_block_blob_with_no_overwrite(self, **kwargs):
         data2 = self.get_random_bytes(LARGE_BLOB_SIZE)

         # Act
-        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={'blobdata': 'data1'})
+        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={"blobdata": "data1"})
         with pytest.raises(ResourceExistsError):
-            await blob.upload_blob(data2, overwrite=False, metadata={'blobdata': 'data2'})
+            await blob.upload_blob(data2, overwrite=False, metadata={"blobdata": "data2"})

         props = await blob.get_blob_properties()

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data1)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
         assert props.blob_type == BlobType.BlockBlob
-        assert props.metadata == {'blobdata': 'data1'}
+        assert props.metadata == {"blobdata": "data1"}
         assert props.size == LARGE_BLOB_SIZE

     @BlobPreparer()
@@ -1001,17 +1034,17 @@ async def test_create_large_block_blob_with_overwrite(self, **kwargs):
         data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512)

         # Act
-        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={'blobdata': 'data1'})
-        update_resp = await blob.upload_blob(data2, overwrite=True, metadata={'blobdata': 'data2'})
+        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={"blobdata": "data1"})
+        update_resp = await blob.upload_blob(data2, overwrite=True, metadata={"blobdata": "data2"})

         props = await blob.get_blob_properties()

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data2)
-        assert props.etag == update_resp.get('etag')
-        assert props.last_modified == update_resp.get('last_modified')
+        assert props.etag == update_resp.get("etag")
+        assert props.last_modified == update_resp.get("last_modified")
         assert props.blob_type == BlobType.BlockBlob
-        assert props.metadata == {'blobdata': 'data2'}
+        assert props.metadata == {"blobdata": "data2"}
         assert props.size == LARGE_BLOB_SIZE + 512

     @BlobPreparer()
@@ -1024,7 +1057,7 @@ async def test_create_blob_from_bytes_single_put(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        data = b'hello world'
+        data = b"hello world"

         # Act
         create_resp = await blob.upload_blob(data)
@@ -1032,8 +1065,8 @@ async def test_create_blob_from_bytes_single_put(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1045,7 +1078,7 @@ async def test_create_blob_from_0_bytes(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        data = b''
+        data = b""

         # Act
         create_resp = await blob.upload_blob(data)
@@ -1053,8 +1086,8 @@ async def test_create_blob_from_0_bytes(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
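These create/overwrite tests all lean on upload_blob returning a dict of response properties whose etag and last_modified match a later get_blob_properties call; a hedged fragment, again assuming `blob` is an aio BlobClient and the data and metadata are illustrative:

# Sketch only: runs inside a coroutine with an existing `blob` client.
resp = await blob.upload_blob(b"hello world", overwrite=True, metadata={"blobdata": "data1"})
props = await blob.get_blob_properties()
assert props.etag == resp.get("etag")
assert props.last_modified == resp.get("last_modified")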
create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1066,7 +1099,7 @@ async def test_create_from_bytes_blob_unicode(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'hello world' + data = b"hello world" # Act create_resp = await blob.upload_blob(data) @@ -1074,8 +1107,8 @@ async def test_create_from_bytes_blob_unicode(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob_name, data) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1088,7 +1121,7 @@ async def test_create_from_bytes_blob_with_lease_id(self, **kwargs): # Arrange blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act create_resp = await blob.upload_blob(data, lease=lease) @@ -1097,8 +1130,8 @@ async def test_create_from_bytes_blob_with_lease_id(self, **kwargs): output = await blob.download_blob(lease=lease) actual = await output.readall() assert actual == data - assert output.properties.etag == create_resp.get('etag') - assert output.properties.last_modified == create_resp.get('last_modified') + assert output.properties.etag == create_resp.get("etag") + assert output.properties.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1112,7 +1145,7 @@ async def test_create_blob_from_bytes_with_metadata(self, **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = self.get_random_bytes(LARGE_BLOB_SIZE) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} # Act await blob.upload_blob(data, metadata=metadata) @@ -1136,9 +1169,7 @@ async def test_create_blob_from_bytes_with_properties(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") await blob.upload_blob(data, content_settings=content_settings) # Assert @@ -1164,8 +1195,8 @@ async def test_create_blob_from_bytes_with_progress(self, **kwargs): progress = [] def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1175,8 +1206,8 @@ def callback(response): # Assert await self.assertBlobEqual(self.container_name, blob_name, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1232,9 +1263,7 @@ async def test_create_frm_bytes_with_index_cnt_props(self, **kwargs): data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act - 
-            content_type='image/png',
-            content_language='spanish')
+        content_settings = ContentSettings(content_type="image/png", content_language="spanish")
         await blob.upload_blob(data[3:], length=5, content_settings=content_settings)

         # Assert
@@ -1270,7 +1299,7 @@ async def test_create_blob_from_bytes_with_blob_tier_specified(self, storage_acc
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
-        data = b'hello world'
+        data = b"hello world"
         blob_tier = StandardBlobTier.Cool

         # Act
@@ -1302,8 +1331,8 @@ async def test_create_blob_from_path(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1326,17 +1355,19 @@ async def test_create_blob_from_path_non_parallel(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")

     @BlobPreparer()
     @recorded_by_proxy_async
-    async def test_upload_blob_from_path_non_parallel_with_standard_blob_tier(self, storage_account_name, storage_account_key):
+    async def test_upload_blob_from_path_non_parallel_with_standard_blob_tier(
+        self, storage_account_name, storage_account_key
+    ):
         # Arrange
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        FILE_PATH = 'non_parallel_with_standard_blob_tier.temp.{}.dat'.format(str(uuid.uuid4()))
+        FILE_PATH = "non_parallel_with_standard_blob_tier.temp.{}.dat".format(str(uuid.uuid4()))
         data = self.get_random_bytes(100)
         blob_tier = StandardBlobTier.Cool
         # Act
@@ -1366,8 +1397,8 @@ async def test_create_blob_from_path_with_progress(self, **kwargs):
         progress = []

         def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))

@@ -1394,9 +1425,7 @@ async def test_create_blob_from_path_with_properties(self, **kwargs):
         data = self.get_random_bytes(LARGE_BLOB_SIZE)

         # Act
-        content_settings = ContentSettings(
-            content_type='image/png',
-            content_language='spanish')
+        content_settings = ContentSettings(content_type="image/png", content_language="spanish")
         with tempfile.TemporaryFile() as temp_file:
             temp_file.write(data)
             temp_file.seek(0)
@@ -1430,8 +1459,8 @@ async def test_create_blob_from_stream_chunked_upload(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1497,8 +1526,8 @@ async def test_create_blob_from_stream_with_progress_chunked_upload(self, **kwar
         progress = []

         def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))

@@ -1548,9 +1577,7 @@ async def test_create_frm_stream_chu_upld_with_countandprops(self, **kwargs):
         data = self.get_random_bytes(LARGE_BLOB_SIZE)

         # Act
-        content_settings = ContentSettings(
-            content_type='image/png',
-            content_language='spanish')
+        content_settings = ContentSettings(content_type="image/png", content_language="spanish")
         blob_size = len(data) - 301
         with tempfile.TemporaryFile() as temp_file:
             temp_file.write(data)
@@ -1576,9 +1603,7 @@ async def test_create_blob_from_stream_chunked_upload_with_properties(self, **kw
         data = self.get_random_bytes(LARGE_BLOB_SIZE)

         # Act
-        content_settings = ContentSettings(
-            content_type='image/png',
-            content_language='spanish')
+        content_settings = ContentSettings(content_type="image/png", content_language="spanish")
         with tempfile.TemporaryFile() as temp_file:
             temp_file.write(data)
             temp_file.seek(0)
@@ -1604,13 +1629,13 @@ async def test_create_blob_from_stream_chunked_upload_with_properties_parallel(s
         blob_tier = StandardBlobTier.Cool

         # Act
-        content_settings = ContentSettings(
-            content_type='image/png',
-            content_language='spanish')
+        content_settings = ContentSettings(content_type="image/png", content_language="spanish")
         with tempfile.TemporaryFile() as temp_file:
             temp_file.write(data)
             temp_file.seek(0)
-            await blob.upload_blob(temp_file, content_settings=content_settings, max_concurrency=2, standard_blob_tier=blob_tier)
+            await blob.upload_blob(
+                temp_file, content_settings=content_settings, max_concurrency=2, standard_blob_tier=blob_tier
+            )

         properties = await blob.get_blob_properties()

@@ -1627,8 +1652,8 @@ async def test_create_blob_from_text(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        text = u'hello 啊齄丂狛狜 world'
-        data = text.encode('utf-8')
+        text = "hello 啊齄丂狛狜 world"
+        data = text.encode("utf-8")

         # Act
         create_resp = await blob.upload_blob(text)
@@ -1636,8 +1661,8 @@ async def test_create_blob_from_text(self, **kwargs):

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1649,11 +1674,11 @@ async def test_create_blob_from_text_with_encoding(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        text = u'hello 啊齄丂狛狜 world'
-        data = text.encode('utf-16')
+        text = "hello 啊齄丂狛狜 world"
+        data = text.encode("utf-16")

         # Act
-        await blob.upload_blob(text, encoding='utf-16')
+        await blob.upload_blob(text, encoding="utf-16")

         # Assert
         await self.assertBlobEqual(self.container_name, blob_name, data)
@@ -1668,19 +1693,19 @@ async def test_create_blob_from_text_with_encoding_and_progress(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        text = u'hello 啊齄丂狛狜 world'
-        data = text.encode('utf-16')
+        text = "hello 啊齄丂狛狜 world"
world" + data = text.encode("utf-16") # Act progress = [] def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) - await blob.upload_blob(text, encoding='utf-16', raw_response_hook=callback) + await blob.upload_blob(text, encoding="utf-16", raw_response_hook=callback) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) @@ -1698,7 +1723,7 @@ async def test_create_blob_from_text_chunked_upload(self, **kwargs): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = self.get_random_text_data(LARGE_BLOB_SIZE) - encoded_data = data.encode('utf-8') + encoded_data = data.encode("utf-8") # Act await blob.upload_blob(data) @@ -1716,7 +1741,7 @@ async def test_create_blob_with_md5(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - data = b'hello world' + data = b"hello world" # Act await blob.upload_blob(data, validate_content=True) @@ -1747,22 +1772,25 @@ async def test_upload_progress_single_put(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), len(data)) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, - credential=storage_account_key) + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, + credential=storage_account_key, + ) await blob_client.upload_blob( data, blob_type=BlobType.BlockBlob, overwrite=True, max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1775,23 +1803,27 @@ async def test_upload_progress_chunked_non_parallel(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + max_single_put_size=1024, + max_block_size=1024, + ) await blob_client.upload_blob( data, blob_type=BlobType.BlockBlob, overwrite=True, max_concurrency=1, - progress_hook=progress.assert_progress) + progress_hook=progress.assert_progress, + ) # Assert progress.assert_complete() @@ -1805,23 +1837,27 @@ async def test_upload_progress_chunked_parallel(self, **kwargs): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_block_size=1024) + 
+            max_block_size=1024,
+        )

         await blob_client.upload_blob(
             data,
             blob_type=BlobType.BlockBlob,
             overwrite=True,
             max_concurrency=3,
-            progress_hook=progress.assert_progress)
+            progress_hook=progress.assert_progress,
+        )

         # Assert
         progress.assert_complete()

@@ -1835,24 +1871,28 @@ async def test_upload_progress_unknown_size(self, **kwargs):
         # parallel tests introduce random order of requests, can only run live
         await self._setup(storage_account_name, storage_account_key)

         blob_name = self._get_blob_reference()
-        data = b'a' * 5 * 1024
+        data = b"a" * 5 * 1024

         progress = ProgressTracker(len(data), 1024)
         stream = NonSeekableStream(BytesIO(data))

         # Act
         blob_client = BlobClient(
-            self.account_url(storage_account_name, 'blob'),
-            self.container_name, blob_name,
+            self.account_url(storage_account_name, "blob"),
+            self.container_name,
+            blob_name,
             credential=storage_account_key,
-            max_single_put_size=1024, max_block_size=1024)
+            max_single_put_size=1024,
+            max_block_size=1024,
+        )

         await blob_client.upload_blob(
             data=stream,
             blob_type=BlobType.BlockBlob,
             overwrite=True,
             max_concurrency=3,
-            progress_hook=progress.assert_progress)
+            progress_hook=progress.assert_progress,
+        )

         # Assert
         progress.assert_complete()

@@ -1887,10 +1927,11 @@ async def test_copy_blob_with_cold_tier(self, **kwargs):
         self.bsc.get_blob_client(self.container_name, blob_name)

         # Act
-        sourceblob = '{0}/{1}/{2}'.format(
-            self.account_url(storage_account_name, "blob"), self.container_name, blob_name)
+        sourceblob = "{0}/{1}/{2}".format(
+            self.account_url(storage_account_name, "blob"), self.container_name, blob_name
+        )

-        copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy')
+        copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy")
         blob_tier = StandardBlobTier.Cold
         await copyblob.start_copy_from_url(sourceblob, standard_blob_tier=blob_tier)

@@ -1917,4 +1958,5 @@ async def test_set_blob_tier_cold_tier(self, **kwargs):

         # Assert
         assert props.blob_tier == StandardBlobTier.Cold
+
 # ------------------------------------------------------------------------------
diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy.py b/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy.py
index 4acfc5fd998b..e2799372c918 100644
--- a/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy.py
+++ b/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy.py
@@ -13,7 +13,7 @@
     BlobServiceClient,
     generate_blob_sas,
     StandardBlobTier,
-    StorageErrorCode
+    StorageErrorCode,
 )
 from azure.storage.blob._shared.policies import StorageContentValidation

@@ -27,30 +27,34 @@
 # ------------------------------------------------------------------------------
+
 class TestStorageBlockBlob(StorageRecordedTestCase):
-    def _setup(self, storage_account_name, key, container_prefix='utcontainer'):
+    def _setup(self, storage_account_name, key, container_prefix="utcontainer"):
         account_url = self.account_url(storage_account_name, "blob")
         if not isinstance(account_url, str):
-            account_url = account_url.encode('utf-8')
-            key = key.encode('utf-8')
+            account_url = account_url.encode("utf-8")
+            key = key.encode("utf-8")
         self.bsc = BlobServiceClient(
             account_url,
             credential=key,
             connection_data_block_size=4 * 1024,
             max_single_put_size=32 * 1024,
-            max_block_size=4 * 1024)
+            max_block_size=4 * 1024,
+        )
         self.config = self.bsc._config
         self.container_name = self.get_resource_name(container_prefix)

         # create source blob to be copied from
-        self.source_blob_name = self.get_resource_name('srcblob')
-        self.source_blob_name_with_special_chars = 'भारत¥test/testsubÐirÍ/'+self.get_resource_name('srcÆblob')
+        self.source_blob_name = self.get_resource_name("srcblob")
+        self.source_blob_name_with_special_chars = "भारत¥test/testsubÐirÍ/" + self.get_resource_name("srcÆblob")
         self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
         self.source_blob_with_special_chars_data = self.get_random_bytes(SOURCE_BLOB_SIZE)

         blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)
-        blob_with_special_chars = self.bsc.get_blob_client(self.container_name, self.source_blob_name_with_special_chars)
+        blob_with_special_chars = self.bsc.get_blob_client(
+            self.container_name, self.source_blob_name_with_special_chars
+        )

         if self.is_live:
             self.bsc.create_container(self.container_name)
@@ -82,7 +86,8 @@ def _setup(self, storage_account_name, key, container_prefix='utcontainer'):
         self.source_blob_url_without_sas = blob.url
         self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url
         self.source_blob_url_with_special_chars = BlobClient.from_blob_url(
-            blob_with_special_chars.url, credential=sas_token_for_special_chars).url
+            blob_with_special_chars.url, credential=sas_token_for_special_chars
+        ).url

     @BlobPreparer()
     @recorded_by_proxy
@@ -93,37 +98,39 @@ def test_put_block_from_url_with_oauth(self, **kwargs):
         # Arrange
         self._setup(storage_account_name, storage_account_key, container_prefix="container1")
         split = 4 * 1024
-        destination_blob_name = self.get_resource_name('destblob')
+        destination_blob_name = self.get_resource_name("destblob")
         destination_blob_client = self.bsc.get_blob_client(self.container_name, destination_blob_name)
-        token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token)
+        token = "Bearer {}".format(
+            self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token
+        )

         # Assert this operation fails without a credential
         with pytest.raises(HttpResponseError):
             destination_blob_client.stage_block_from_url(
-                block_id=1,
-                source_url=self.source_blob_url_without_sas,
-                source_offset=0,
-                source_length=split)
+                block_id=1, source_url=self.source_blob_url_without_sas, source_offset=0, source_length=split
+            )
         # Assert it passes after passing an oauth credential
         destination_blob_client.stage_block_from_url(
-            block_id=1,
-            source_url=self.source_blob_url_without_sas,
-            source_offset=0,
-            source_length=split,
-            source_authorization=token)
+            block_id=1,
+            source_url=self.source_blob_url_without_sas,
+            source_offset=0,
+            source_length=split,
+            source_authorization=token,
+        )
         destination_blob_client.stage_block_from_url(
             block_id=2,
             source_url=self.source_blob_url_without_sas,
             source_offset=split,
             source_length=split,
-            source_authorization=token)
+            source_authorization=token,
+        )

-        committed, uncommitted = destination_blob_client.get_block_list('all')
+        committed, uncommitted = destination_blob_client.get_block_list("all")
         assert len(uncommitted) == 2
         assert len(committed) == 0

         # Act part 2: commit the blocks
-        destination_blob_client.commit_block_list(['1', '2'])
+        destination_blob_client.commit_block_list(["1", "2"])

         # Assert destination blob has right content
         destination_blob_data = destination_blob_client.download_blob().readall()
@@ -138,29 +145,25 @@ def test_put_block_from_url_and_commit(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

         # Act part 1: make put block from url calls
         split = 4 * 1024
         dest_blob.stage_block_from_url(
-            block_id=1,
-            source_url=self.source_blob_url,
-            source_offset=0,
-            source_length=split)
+            block_id=1, source_url=self.source_blob_url, source_offset=0, source_length=split
+        )
         dest_blob.stage_block_from_url(
-            block_id=2,
-            source_url=self.source_blob_url,
-            source_offset=split,
-            source_length=split)
+            block_id=2, source_url=self.source_blob_url, source_offset=split, source_length=split
+        )

         # Assert blocks
-        committed, uncommitted = dest_blob.get_block_list('all')
+        committed, uncommitted = dest_blob.get_block_list("all")
         assert len(uncommitted) == 2
         assert len(committed) == 0

         # Act part 2: commit the blocks
-        dest_blob.commit_block_list(['1', '2'])
+        dest_blob.commit_block_list(["1", "2"])

         # Assert destination blob has right content
         content = dest_blob.download_blob().readall()
@@ -168,23 +171,19 @@ def test_put_block_from_url_and_commit(self, **kwargs):
         assert content == self.source_blob_data

         dest_blob.stage_block_from_url(
-            block_id=3,
-            source_url=self.source_blob_url_with_special_chars,
-            source_offset=0,
-            source_length=split)
+            block_id=3, source_url=self.source_blob_url_with_special_chars, source_offset=0, source_length=split
+        )
         dest_blob.stage_block_from_url(
-            block_id=4,
-            source_url=self.source_blob_url_with_special_chars,
-            source_offset=split,
-            source_length=split)
+            block_id=4, source_url=self.source_blob_url_with_special_chars, source_offset=split, source_length=split
+        )

         # Assert blocks
-        committed, uncommitted = dest_blob.get_block_list('all')
+        committed, uncommitted = dest_blob.get_block_list("all")
         assert len(uncommitted) == 2
         assert len(committed) == 2

         # Act part 2: commit the blocks
-        dest_blob.commit_block_list(['3', '4'])
+        dest_blob.commit_block_list(["3", "4"])

         # Assert destination blob has right content
         content = dest_blob.download_blob().readall()
@@ -198,7 +197,7 @@ def test_put_block_from_url_and_validate_content_md5(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - dest_blob_name = self.get_resource_name('destblob') + dest_blob_name = self.get_resource_name("destblob") dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act @@ -246,19 +247,21 @@ def test_copy_blob_sync(self, **kwargs): # Assert assert copy_props is not None - assert (copy_props['copy_id']) is not None - assert 'success' == copy_props['copy_status'] + assert (copy_props["copy_id"]) is not None + assert "success" == copy_props["copy_status"] # Verify content content = dest_blob.download_blob().readall() assert self.source_blob_data == content - copy_props_with_special_chars = dest_blob.start_copy_from_url(self.source_blob_url_with_special_chars, requires_sync=True) + copy_props_with_special_chars = dest_blob.start_copy_from_url( + self.source_blob_url_with_special_chars, requires_sync=True + ) # Assert assert copy_props_with_special_chars is not None - assert copy_props_with_special_chars['copy_id'] is not None - assert 'success' == copy_props_with_special_chars['copy_status'] + assert copy_props_with_special_chars["copy_id"] is not None + assert "success" == copy_props_with_special_chars["copy_status"] # Verify content content = dest_blob.download_blob().readall() @@ -271,7 +274,7 @@ def test_copy_blob_with_cold_tier_sync(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - dest_blob_name = self.get_resource_name('destblob') + dest_blob_name = self.get_resource_name("destblob") dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) blob_tier = StandardBlobTier.Cold @@ -289,17 +292,17 @@ def test_sync_copy_blob_returns_vid(self, **kwargs): storage_account_key = kwargs.pop("versioned_storage_account_key") self._setup(storage_account_name, storage_account_key) - dest_blob_name = self.get_resource_name('destblob') + dest_blob_name = self.get_resource_name("destblob") dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act copy_props = dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True) # Assert - assert copy_props['version_id'] is not None + assert copy_props["version_id"] is not None assert copy_props is not None - assert copy_props['copy_id'] is not None - assert 'success' == copy_props['copy_status'] + assert copy_props["copy_id"] is not None + assert "success" == copy_props["copy_status"] # Verify content content = dest_blob.download_blob().readall() diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy_async.py b/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy_async.py index 47ec8333194f..0708d3dd70e8 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob_sync_copy_async.py @@ -33,12 +33,12 @@ async def _setup(self, storage_account_name, key): connection_data_block_size=4 * 1024, max_single_put_size=32 * 1024, max_block_size=4 * 1024, - ) + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") # create source blob to be copied from - self.source_blob_name = self.get_resource_name('srcblob') + self.source_blob_name = self.get_resource_name("srcblob") self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name) @@ -73,38 +73,40 @@ 
         # Arrange
         await self._setup(storage_account_name, storage_account_key)
         split = 4 * 1024
-        destination_blob_name = self.get_resource_name('destblob')
+        destination_blob_name = self.get_resource_name("destblob")
         destination_blob_client = self.bsc.get_blob_client(self.container_name, destination_blob_name)
-        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token("https://storage.azure.com/.default")
+        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token(
+            "https://storage.azure.com/.default"
+        )
         token = "Bearer {}".format(access_token.token)

         # Assert this operation fails without a credential
         with pytest.raises(HttpResponseError):
             await destination_blob_client.stage_block_from_url(
-                block_id=1,
-                source_url=self.source_blob_url_without_sas,
-                source_offset=0,
-                source_length=split)
+                block_id=1, source_url=self.source_blob_url_without_sas, source_offset=0, source_length=split
+            )
         # Assert it passes after passing an oauth credential
         await destination_blob_client.stage_block_from_url(
-            block_id=1,
-            source_url=self.source_blob_url_without_sas,
-            source_offset=0,
-            source_length=split,
-            source_authorization=token)
+            block_id=1,
+            source_url=self.source_blob_url_without_sas,
+            source_offset=0,
+            source_length=split,
+            source_authorization=token,
+        )
         await destination_blob_client.stage_block_from_url(
             block_id=2,
             source_url=self.source_blob_url_without_sas,
             source_offset=split,
             source_length=split,
-            source_authorization=token)
+            source_authorization=token,
+        )

-        committed, uncommitted = await destination_blob_client.get_block_list('all')
+        committed, uncommitted = await destination_blob_client.get_block_list("all")
         assert len(uncommitted) == 2
         assert len(committed) == 0

         # Act part 2: commit the blocks
-        await destination_blob_client.commit_block_list(['1', '2'])
+        await destination_blob_client.commit_block_list(["1", "2"])

         # Assert destination blob has right content
         destination_blob = await destination_blob_client.download_blob()
@@ -121,31 +123,28 @@ async def test_put_block_from_url_and_commit_async(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

         # Act part 1: make put block from url calls
         split = 4 * 1024
         futures = [
             dest_blob.stage_block_from_url(
-                block_id=1,
-                source_url=self.source_blob_url,
-                source_offset=0,
-                source_length=split),
+                block_id=1, source_url=self.source_blob_url, source_offset=0, source_length=split
+            ),
             dest_blob.stage_block_from_url(
-                block_id=2,
-                source_url=self.source_blob_url,
-                source_offset=split,
-                source_length=split)]
+                block_id=2, source_url=self.source_blob_url, source_offset=split, source_length=split
+            ),
+        ]
         await asyncio.gather(*futures)

         # Assert blocks
-        committed, uncommitted = await dest_blob.get_block_list('all')
+        committed, uncommitted = await dest_blob.get_block_list("all")
         assert len(uncommitted) == 2
         assert len(committed) == 0

         # Act part 2: commit the blocks
-        await dest_blob.commit_block_list(['1', '2'])
+        await dest_blob.commit_block_list(["1", "2"])

         # Assert destination blob has right content
         content = await (await dest_blob.download_blob()).readall()
@@ -160,7 +159,7 @@ async def test_put_block_from_url_and_vldte_content_md5(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)
         src_md5 = StorageContentValidation.get_content_md5(self.source_blob_data)

@@ -170,10 +169,11 @@ async def test_put_block_from_url_and_vldte_content_md5(self, **kwargs):
             source_url=self.source_blob_url,
             source_content_md5=src_md5,
             source_offset=0,
-            source_length=8 * 1024)
+            source_length=8 * 1024,
+        )

         # Assert block was staged
-        committed, uncommitted = await dest_blob.get_block_list('all')
+        committed, uncommitted = await dest_blob.get_block_list("all")
         assert len(uncommitted) == 1
         assert len(committed) == 0

@@ -185,11 +185,12 @@ async def test_put_block_from_url_and_vldte_content_md5(self, **kwargs):
                 source_url=self.source_blob_url,
                 source_content_md5=fake_md5,
                 source_offset=0,
-                source_length=8 * 1024)
+                source_length=8 * 1024,
+            )
         assert error.value.error_code == StorageErrorCode.md5_mismatch

         # Assert block was not staged
-        committed, uncommitted = await dest_blob.get_block_list('all')
+        committed, uncommitted = await dest_blob.get_block_list("all")
         assert len(uncommitted) == 1
         assert len(committed) == 0

@@ -201,7 +202,7 @@ async def test_copy_blob_sync_async(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

         # Act
@@ -209,8 +210,8 @@ async def test_copy_blob_sync_async(self, **kwargs):

         # Assert
         assert copy_props is not None
-        assert copy_props['copy_id'] is not None
-        assert 'success' == copy_props['copy_status']
+        assert copy_props["copy_id"] is not None
+        assert "success" == copy_props["copy_status"]

         # Verify content
         content = await (await dest_blob.download_blob()).readall()
@@ -223,7 +224,7 @@ async def test_copy_blob_with_cold_tier_sync(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         await self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)
         blob_tier = StandardBlobTier.Cold

@@ -242,17 +243,17 @@ async def test_sync_copy_blob_returns_vid(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        dest_blob_name = self.get_resource_name('destblob')
+        dest_blob_name = self.get_resource_name("destblob")
         dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

         # Act
         copy_props = await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True)

         # Assert
-        assert copy_props['version_id'] is not None
+        assert copy_props["version_id"] is not None
         assert copy_props is not None
-        assert copy_props['copy_id'] is not None
-        assert 'success' == copy_props['copy_status']
+        assert copy_props["copy_id"] is not None
+        assert "success" == copy_props["copy_status"]

         # Verify content
         content = await (await dest_blob.download_blob()).readall()
diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob.py b/sdk/storage/azure-storage-blob/tests/test_common_blob.py
index d5577a6f9c0a..9a0d546b2289 100644
--- a/sdk/storage/azure-storage-blob/tests/test_common_blob.py
+++ b/sdk/storage/azure-storage-blob/tests/test_common_blob.py
@@ -22,7 +22,8 @@
     HttpResponseError,
     ResourceExistsError,
     ResourceModifiedError,
-    ResourceNotFoundError)
+    ResourceNotFoundError,
+)
 from azure.core.pipeline.transport import RequestsTransport
 from azure.storage.blob import (
     AccessPolicy,
@@ -47,7 +48,8 @@
     generate_account_sas,
     generate_blob_sas,
     generate_container_sas,
-    upload_blob_to_url)
+    upload_blob_to_url,
+)
 from azure.storage.blob._generated.models import RehydratePriority
 from devtools_testutils import FakeTokenCredential, recorded_by_proxy

@@ -55,16 +57,16 @@
 from settings.testcase import BlobPreparer

 # ------------------------------------------------------------------------------
-TEST_CONTAINER_PREFIX = 'container'
-TEST_BLOB_PREFIX = 'blob'
+TEST_CONTAINER_PREFIX = "container"
+TEST_BLOB_PREFIX = "blob"
 # ------------------------------------------------------------------------------


 class TestStorageCommonBlob(StorageRecordedTestCase):
     def _setup(self, storage_account_name, key):
         self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key)
-        self.container_name = self.get_resource_name('utcontainer')
-        self.source_container_name = self.get_resource_name('utcontainersource')
+        self.container_name = self.get_resource_name("utcontainer")
+        self.source_container_name = self.get_resource_name("utcontainersource")
         if self.is_live:
             try:
                 self.bsc.create_container(self.container_name, timeout=5)
@@ -76,7 +78,7 @@ def _setup(self, storage_account_name, key):
                 pass
         self.byte_data = self.get_random_bytes(1024)

-    def _create_blob(self, tags=None, data=b'', **kwargs):
+    def _create_blob(self, tags=None, data=b"", **kwargs):
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         blob.upload_blob(data, tags=tags, overwrite=True, **kwargs)
@@ -89,7 +91,7 @@ def _create_source_blob(self, data):

     def _setup_remote(self, storage_account_name, key):
         self.bsc2 = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key)
-        self.remote_container_name = 'rmt'
+        self.remote_container_name = "rmt"

     def _teardown(self, file_path):
         if os.path.isfile(file_path):
@@ -108,8 +110,13 @@ def _get_blob_reference(self):
     def _create_block_blob(self, standard_blob_tier=None, overwrite=False, tags=None):
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        blob.upload_blob(self.byte_data, length=len(self.byte_data), standard_blob_tier=standard_blob_tier,
-                         overwrite=overwrite, tags=tags)
+        blob.upload_blob(
+            self.byte_data,
+            length=len(self.byte_data),
+            standard_blob_tier=standard_blob_tier,
+            overwrite=overwrite,
+            tags=tags,
+        )
         return blob_name

     def _create_empty_block_blob(self, overwrite=False, tags=None):
@@ -119,7 +126,7 @@ def _create_empty_block_blob(self, overwrite=False, tags=None):
         return blob_name

     def _create_remote_container(self):
-        self.remote_container_name = self.get_resource_name('remotectnr')
+        self.remote_container_name = self.get_resource_name("remotectnr")
         remote_container = self.bsc2.get_container_client(self.remote_container_name)
         try:
             remote_container.create_container()
@@ -128,7 +135,7 @@ def _create_remote_container(self):

     def _create_remote_block_blob(self, blob_data=None):
         if not blob_data:
-            blob_data = b'12345678' * 1024
+            blob_data = b"12345678" * 1024
         source_blob_name = self._get_blob_reference()
         source_blob = self.bsc2.get_blob_client(self.remote_container_name, source_blob_name)
         source_blob.upload_blob(blob_data, overwrite=True)
@@ -137,10 +144,10 @@ def _create_remote_block_blob(self, blob_data=None):
     def _wait_for_async_copy(self, blob):
         count = 0
         props = blob.get_blob_properties()
-        while props.copy.status == 'pending':
+        while props.copy.status == "pending":
             count = count + 1
             if count > 10:
-                self.fail('Timed out waiting for async copy to complete.')
+                self.fail("Timed out waiting for async copy to complete.")
             self.sleep(6)
             props = blob.get_blob_properties()
         return props

@@ -221,7 +228,7 @@ def test_blob_snapshot_exists(self, **kwargs):

         # Assert
         assert prop
-        assert snapshot['snapshot'] == prop.snapshot
+        assert snapshot["snapshot"] == prop.snapshot

     @BlobPreparer()
     @recorded_by_proxy
@@ -259,15 +266,15 @@ def test_create_blob_with_question_mark(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        blob_name = '?ques?tion?'
-        blob_data = '???'
+        blob_name = "?ques?tion?"
+        blob_data = "???"

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         blob.upload_blob(blob_data)

         # Assert
-        data = blob.download_blob(encoding='utf-8')
+        data = blob.download_blob(encoding="utf-8")
         assert data is not None
         assert data.readall() == blob_data

@@ -280,16 +287,18 @@ def test_create_blob_with_if_tags(self, **kwargs):
         self._setup(storage_account_name, storage_account_key)
         tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"}
         blob_name = self._create_empty_block_blob(tags=tags, overwrite=True)
-        blob_data = '???'
+        blob_data = "???"

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         with pytest.raises(ResourceModifiedError):
             blob.upload_blob(blob_data, overwrite=True, if_tags_match_condition="\"tag1\"='first tag'")
-        blob.upload_blob(blob_data, overwrite=True, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'")
+        blob.upload_blob(
+            blob_data, overwrite=True, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'"
+        )

         # Assert
-        data = blob.download_blob(encoding='utf-8')
+        data = blob.download_blob(encoding="utf-8")
         assert data is not None
         assert data.readall() == blob_data

@@ -302,13 +311,13 @@ def test_create_blob_with_special_chars(self, **kwargs):
         self._setup(storage_account_name, storage_account_key)

         # Act
-        for c in '-._ /()$=\',~':
-            blob_name = '{0}a{0}a{0}'.format(c)
+        for c in "-._ /()$=',~":
+            blob_name = "{0}a{0}a{0}".format(c)
             blob_data = c
             blob = self.bsc.get_blob_client(self.container_name, blob_name)
             blob.upload_blob(blob_data, length=len(blob_data))

-            data = blob.download_blob(encoding='utf-8')
+            data = blob.download_blob(encoding="utf-8")
             assert data.readall() == blob_data

     @BlobPreparer()
@@ -320,16 +329,16 @@ def test_create_blob_and_download_blob_with_vid(self, **kwargs):
         self._setup(versioned_storage_account_name, versioned_storage_account_key)

         # Act
-        for c in '-._ /()$=\',~':
-            blob_name = '{0}a{0}a{0}'.format(c)
+        for c in "-._ /()$=',~":
+            blob_name = "{0}a{0}a{0}".format(c)
             blob_data = c
             blob = self.bsc.get_blob_client(self.container_name, blob_name)
             resp = blob.upload_blob(blob_data, length=len(blob_data), overwrite=True)
-            assert resp.get('version_id') is not None
+            assert resp.get("version_id") is not None

-            data = blob.download_blob(encoding='utf-8', version_id=resp.get('version_id'))
+            data = blob.download_blob(encoding="utf-8", version_id=resp.get("version_id"))
             assert data.readall() == blob_data
-            assert data.properties.get('version_id') is not None
+            assert data.properties.get("version_id") is not None

     @BlobPreparer()
     @recorded_by_proxy
@@ -340,14 +349,14 @@ def test_create_blob_with_lease_id(self, **kwargs):

         self._setup(storage_account_name, storage_account_key)
         blob_name = self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")

         # Act
-        data = b'hello world again'
+        data = b"hello world again"
         resp = blob.upload_blob(data, length=len(data), lease=lease)

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         content = blob.download_blob(lease=lease).readall()
         assert content == data

@@ -364,11 +373,12 @@ def gen():
             yield "hello"
             yield "world!"
             yield " eom"
+
         blob = self.bsc.get_blob_client(self.container_name, "gen_blob")
         resp = blob.upload_blob(data=gen())

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         content = blob.download_blob().readall()
         assert content == b"helloworld! eom"

@@ -382,7 +392,7 @@ def test_create_blob_with_requests(self, **kwargs):
         self._setup(storage_account_name, storage_account_key)

         # Create a blob to download with requests using SAS
-        data = b'a' * 1024 * 1024
+        data = b"a" * 1024 * 1024
         blob = self._create_blob(data=data)

         sas = self.generate_sas(
@@ -396,12 +406,12 @@ def test_create_blob_with_requests(self, **kwargs):
         )

         # Act
-        uri = blob.url + '?' + sas
+        uri = blob.url + "?" + sas
         data = requests.get(uri, stream=True)
-        blob2 = self.bsc.get_blob_client(self.container_name, blob.blob_name + '_copy')
+        blob2 = self.bsc.get_blob_client(self.container_name, blob.blob_name + "_copy")
         resp = blob2.upload_blob(data=data.raw)

-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None

     @BlobPreparer()
     @recorded_by_proxy
@@ -411,15 +421,15 @@ def test_create_blob_with_metadata(self, **kwargs):

         self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
-        metadata={'hello': 'world', 'number': '42'}
+        metadata = {"hello": "world", "number": "42"}

         # Act
-        data = b'hello world'
+        data = b"hello world"
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         resp = blob.upload_blob(data, length=len(data), metadata=metadata)

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         md = blob.get_blob_properties().metadata
         assert md == metadata

@@ -430,8 +440,8 @@ def test_upload_blob_with_dictionary(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        blob_name = 'test_blob'
-        blob_data = {'hello': 'world'}
+        blob_name = "test_blob"
+        blob_data = {"hello": "world"}

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
@@ -460,7 +470,7 @@ def data_generator():
         data = blob.download_blob().readall()

         # Assert
-        assert data == raw_data*2
+        assert data == raw_data * 2

     @pytest.mark.live_test_only
     @BlobPreparer()
@@ -475,12 +485,12 @@ def test_upload_blob_from_pipe(self, **kwargs):

         reader_fd, writer_fd = os.pipe()

-        with os.fdopen(writer_fd, 'wb') as writer:
+        with os.fdopen(writer_fd, "wb") as writer:
             writer.write(data)

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        with os.fdopen(reader_fd, mode='rb') as reader:
+        with os.fdopen(reader_fd, mode="rb") as reader:
             blob.upload_blob(data=reader, overwrite=True)

         blob_data = blob.download_blob().readall()
@@ -514,8 +524,7 @@ def test_get_blob_with_snapshot(self, **kwargs):
         self._setup(storage_account_name, storage_account_key)
         blob_name = self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
snapshot = self.bsc.get_blob_client( - self.container_name, blob_name, snapshot=blob.create_snapshot()) + snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob.create_snapshot()) # Act data = snapshot.download_blob() @@ -533,10 +542,9 @@ def test_get_blob_with_snapshot_previous(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - snapshot = self.bsc.get_blob_client( - self.container_name, blob_name, snapshot=blob.create_snapshot()) + snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob.create_snapshot()) - upload_data = b'hello world again' + upload_data = b"hello world again" blob.upload_blob(upload_data, length=len(upload_data), overwrite=True) # Act @@ -545,7 +553,7 @@ def test_get_blob_with_snapshot_previous(self, **kwargs): # Assert assert blob_previous.readall() == self.byte_data - assert blob_latest.readall() == b'hello world again' + assert blob_latest.readall() == b"hello world again" @BlobPreparer() @recorded_by_proxy @@ -572,7 +580,7 @@ def test_get_blob_with_lease(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act data = blob.download_blob(lease=lease) @@ -609,15 +617,13 @@ def test_set_blob_properties_with_existing_blob(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.set_http_headers( - content_settings=ContentSettings( - content_language='spanish', - content_disposition='inline'), + content_settings=ContentSettings(content_language="spanish", content_disposition="inline"), ) # Assert props = blob.get_blob_properties() - assert props.content_settings.content_language == 'spanish' - assert props.content_settings.content_disposition == 'inline' + assert props.content_settings.content_language == "spanish" + assert props.content_settings.content_disposition == "inline" @BlobPreparer() @recorded_by_proxy @@ -632,21 +638,19 @@ def test_set_blob_properties_with_if_tags(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) with pytest.raises(ResourceModifiedError): - blob.set_http_headers(content_settings=ContentSettings( - content_language='spanish', - content_disposition='inline'), - if_tags_match_condition="\"tag1\"='first tag'") + blob.set_http_headers( + content_settings=ContentSettings(content_language="spanish", content_disposition="inline"), + if_tags_match_condition="\"tag1\"='first tag'", + ) blob.set_http_headers( - content_settings=ContentSettings( - content_language='spanish', - content_disposition='inline'), - if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'" + content_settings=ContentSettings(content_language="spanish", content_disposition="inline"), + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", ) # Assert props = blob.get_blob_properties() - assert props.content_settings.content_language == 'spanish' - assert props.content_settings.content_disposition == 'inline' + assert props.content_settings.content_language == "spanish" + assert props.content_settings.content_disposition == "inline" @BlobPreparer() @recorded_by_proxy @@ -660,15 +664,14 @@ def 
test_set_blob_properties_with_blob_settings_param(self, **kwargs): props = blob.get_blob_properties() # Act - props.content_settings.content_language = 'spanish' - props.content_settings.content_disposition = 'inline' + props.content_settings.content_language = "spanish" + props.content_settings.content_disposition = "inline" blob.set_http_headers(content_settings=props.content_settings) # Assert props = blob.get_blob_properties() - assert props.content_settings.content_language == 'spanish' - assert props.content_settings.content_disposition == 'inline' - + assert props.content_settings.content_language == "spanish" + assert props.content_settings.content_disposition == "inline" @BlobPreparer() @recorded_by_proxy @@ -687,7 +690,7 @@ def test_get_blob_properties(self, **kwargs): assert isinstance(props, BlobProperties) assert props.blob_type == BlobType.BlockBlob assert props.size == len(self.byte_data) - assert props.lease.status == 'unlocked' + assert props.lease.status == "unlocked" assert props.creation_time is not None @BlobPreparer() @@ -708,7 +711,7 @@ def test_get_blob_properties_returns_rehydrate_priority(self, **kwargs): assert isinstance(props, BlobProperties) assert props.blob_type == BlobType.BlockBlob assert props.size == len(self.byte_data) - assert props.rehydrate_priority == 'High' + assert props.rehydrate_priority == "High" # This test is to validate that the ErrorCode is retrieved from the header during a # HEAD request. @@ -725,11 +728,11 @@ def test_get_blob_properties_fail(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=1) with pytest.raises(HttpResponseError) as e: - blob.get_blob_properties() # Invalid snapshot value of 1 + blob.get_blob_properties() # Invalid snapshot value of 1 # Assert # TODO: No error code returned - #assert StorageErrorCode.invalid_query_parameter_value == e.exception.error_code + # assert StorageErrorCode.invalid_query_parameter_value == e.exception.error_code # This test is to validate that the ErrorCode is retrieved from the header during a # GET request. This is preferred to relying on the ErrorCode in the body. 
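# A minimal sketch, assuming hypothetical account details, of the header-based
# error-code pattern the tests above and below exercise: a HEAD response carries
# no body, so any storage error code must be recovered from the x-ms-error-code
# response header, which this SDK surfaces on the raised HttpResponseError. The
# account URL, credential, container name, blob name, and the helper name
# error_code_from_failed_head are placeholders, not part of this change set.
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobClient

def error_code_from_failed_head(account_url: str, credential: str):
    # snapshot=1 is not a valid snapshot timestamp, so this HEAD request
    # (get_blob_properties) is expected to fail with HttpResponseError.
    blob = BlobClient(account_url, "mycontainer", "myblob", snapshot=1, credential=credential)
    try:
        blob.get_blob_properties()
    except HttpResponseError as e:
        # When present, error_code was parsed from the response header; the
        # TODOs in these tests note the service may return none in this case.
        return getattr(e, "error_code", None)
    return None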
@@ -745,11 +748,11 @@ def test_get_blob_metadata_fail(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=1) with pytest.raises(HttpResponseError) as e: - blob.get_blob_properties().metadata # Invalid snapshot value of 1 + blob.get_blob_properties().metadata # Invalid snapshot value of 1 # Assert # TODO: No error code returned - #assert StorageErrorCode.invalid_query_parameter_value == e.exception.error_code + # assert StorageErrorCode.invalid_query_parameter_value == e.exception.error_code @BlobPreparer() @recorded_by_proxy @@ -794,9 +797,9 @@ def test_list_blobs_server_encryption(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob_list = container.list_blobs() - #Act + # Act - #Assert + # Assert for blob in blob_list: assert blob.server_encrypted @@ -810,13 +813,13 @@ def test_no_server_encryption(self, **kwargs): blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - #Act + # Act def callback(response): - response.http_response.headers['x-ms-server-encrypted'] = 'false' + response.http_response.headers["x-ms-server-encrypted"] = "false" props = blob.get_blob_properties(raw_response_hook=callback) - #Assert + # Assert assert not props.server_encrypted @BlobPreparer() @@ -830,7 +833,7 @@ def test_get_blob_properties_with_snapshot(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob = self.bsc.get_blob_client(self.container_name, blob_name) res = blob.create_snapshot() - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 2 # Act @@ -851,7 +854,7 @@ def test_get_blob_properties_with_leased_blob(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act props = blob.get_blob_properties() @@ -860,9 +863,9 @@ def test_get_blob_properties_with_leased_blob(self, **kwargs): assert isinstance(props, BlobProperties) assert props.blob_type == BlobType.BlockBlob assert props.size == len(self.byte_data) - assert props.lease.status == 'locked' - assert props.lease.state == 'leased' - assert props.lease.duration == 'infinite' + assert props.lease.status == "locked" + assert props.lease.state == "leased" + assert props.lease.duration == "infinite" @BlobPreparer() @recorded_by_proxy @@ -887,7 +890,7 @@ def test_set_blob_metadata_with_upper_case(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - metadata = {'hello': ' world ', ' number ': '42', 'UP': 'UPval'} + metadata = {"hello": " world ", " number ": "42", "UP": "UPval"} blob_name = self._create_block_blob() # Act @@ -897,10 +900,10 @@ def test_set_blob_metadata_with_upper_case(self, **kwargs): # Assert md = blob.get_blob_properties().metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['UP'] == 'UPval' - assert not 'up' in md + assert md["hello"] == "world" + assert md["number"] == "42" + assert md["UP"] == "UPval" + assert not "up" in md @BlobPreparer() @recorded_by_proxy @@ -910,7 +913,7 @@ def test_set_blob_metadata_with_if_tags(self, **kwargs): self._setup(storage_account_name, storage_account_key) tags = {"tag1 name": "my 
tag", "tag2": "secondtag", "tag3": "thirdtag"} - metadata = {'hello': ' world ', ' number ': '42', 'UP': 'UPval'} + metadata = {"hello": " world ", " number ": "42", "UP": "UPval"} blob_name = self._create_block_blob(tags=tags, overwrite=True) # Act @@ -922,10 +925,10 @@ def test_set_blob_metadata_with_if_tags(self, **kwargs): # Assert md = blob.get_blob_properties().metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['UP'] == 'UPval' - assert not 'up' in md + assert md["hello"] == "world" + assert md["number"] == "42" + assert md["UP"] == "UPval" + assert not "up" in md @BlobPreparer() @recorded_by_proxy @@ -934,7 +937,7 @@ def test_set_blob_metadata_returns_vid(self, **kwargs): versioned_storage_account_key = kwargs.pop("versioned_storage_account_key") self._setup(versioned_storage_account_name, versioned_storage_account_key) - metadata = {'hello': 'world', 'number': '42', 'UP': 'UPval'} + metadata = {"hello": "world", "number": "42", "UP": "UPval"} blob_name = self._create_block_blob() # Act @@ -942,13 +945,13 @@ def test_set_blob_metadata_returns_vid(self, **kwargs): resp = blob.set_blob_metadata(metadata) # Assert - assert resp['version_id'] is not None + assert resp["version_id"] is not None md = blob.get_blob_properties().metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['UP'] == 'UPval' - assert not 'up' in md + assert md["hello"] == "world" + assert md["number"] == "42" + assert md["UP"] == "UPval" + assert not "up" in md @BlobPreparer() @recorded_by_proxy @@ -983,7 +986,11 @@ def test_delete_blob_with_if_tags(self, **kwargs): with pytest.raises(ResourceModifiedError): blob.delete_blob(if_tags_match_condition="\"tag1\"='first tag'") - resp = blob.delete_blob(etag=prop.etag, match_condition=MatchConditions.IfNotModified, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") + resp = blob.delete_blob( + etag=prop.etag, + match_condition=MatchConditions.IfNotModified, + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", + ) # Assert assert resp is None @@ -998,13 +1005,13 @@ def test_delete_specific_blob_version(self, **kwargs): blob_name = self.get_resource_name("blobtodelete") blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - resp = blob_client.upload_blob(b'abc', overwrite=True) - assert resp['version_id'] is not None + resp = blob_client.upload_blob(b"abc", overwrite=True) + assert resp["version_id"] is not None - blob_client.upload_blob(b'abc', overwrite=True) + blob_client.upload_blob(b"abc", overwrite=True) # Act - resp = blob_client.delete_blob(version_id=resp['version_id']) + resp = blob_client.delete_blob(version_id=resp["version_id"]) blob_list = list(self.bsc.get_container_client(self.container_name).list_blobs(include="versions")) @@ -1021,11 +1028,11 @@ def test_delete_blob_version_with_blob_sas(self, **kwargs): self._setup(versioned_storage_account_name, versioned_storage_account_key) blob_name = self._create_block_blob() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) - resp = blob_client.upload_blob(b'abcde', overwrite=True) + resp = blob_client.upload_blob(b"abcde", overwrite=True) - version_id = resp['version_id'] + version_id = resp["version_id"] assert version_id is not None - blob_client.upload_blob(b'abc', overwrite=True) + blob_client.upload_blob(b"abc", overwrite=True) token = self.generate_sas( generate_blob_sas, @@ -1075,15 +1082,14 @@ def 
test_delete_blob_snapshot(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - snapshot = self.bsc.get_blob_client( - self.container_name, blob_name, snapshot=blob.create_snapshot()) + snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob.create_snapshot()) # Act snapshot.delete_blob() # Assert container = self.bsc.get_container_client(self.container_name) - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 1 assert blobs[0].name == blob_name assert blobs[0].snapshot is None @@ -1100,11 +1106,11 @@ def test_delete_blob_snapshots(self, **kwargs): blob.create_snapshot() # Act - blob.delete_blob(delete_snapshots='only') + blob.delete_blob(delete_snapshots="only") # Assert container = self.bsc.get_container_client(self.container_name) - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 1 assert blobs[0].snapshot is None @@ -1120,17 +1126,17 @@ def test_create_blob_snapshot_returns_vid(self, **kwargs): blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) resp = blob.create_snapshot() - blobs = list(container.list_blobs(include='versions')) + blobs = list(container.list_blobs(include="versions")) - assert resp['version_id'] is not None + assert resp["version_id"] is not None # Both create blob and create snapshot will create a new version assert len(blobs) >= 2 # Act - blob.delete_blob(delete_snapshots='include') + blob.delete_blob(delete_snapshots="include") # Assert - blobs = list(container.list_blobs(include=['snapshots', 'versions'])) + blobs = list(container.list_blobs(include=["snapshots", "versions"])) # versions are not deleted so blob lists shouldn't be empty assert len(blobs) > 0 assert blobs[0].snapshot is None @@ -1147,14 +1153,14 @@ def test_delete_blob_with_snapshots(self, **kwargs): blob.create_snapshot() # Act - #with pytest.raises(HttpResponseError): + # with pytest.raises(HttpResponseError): # blob.delete_blob() - blob.delete_blob(delete_snapshots='include') + blob.delete_blob(delete_snapshots="include") # Assert container = self.bsc.get_container_client(self.container_name) - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 0 @BlobPreparer() @@ -1171,7 +1177,7 @@ def test_soft_delete_blob_without_snapshots(self, **kwargs): # Soft delete the blob blob.delete_blob() - blob_list = list(container.list_blobs(include='deleted')) + blob_list = list(container.list_blobs(include="deleted")) # Assert assert len(blob_list) == 1 @@ -1185,7 +1191,7 @@ def test_soft_delete_blob_without_snapshots(self, **kwargs): # Restore blob with undelete blob.undelete_blob() - blob_list = list(container.list_blobs(include='deleted')) + blob_list = list(container.list_blobs(include="deleted")) # Assert assert len(blob_list) == 1 @@ -1204,12 +1210,11 @@ def test_soft_delete_single_blob_snapshot(self, **kwargs): blob_snapshot_2 = blob.create_snapshot() # Soft delete blob_snapshot_1 - snapshot_1 = self.bsc.get_blob_client( - self.container_name, blob_name, snapshot=blob_snapshot_1) + snapshot_1 = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob_snapshot_1) snapshot_1.delete_blob() with pytest.raises(ValueError): - 
snapshot_1.delete_blob(delete_snapshots='only') + snapshot_1.delete_blob(delete_snapshots="only") container = self.bsc.get_container_client(self.container_name) blob_list = list(container.list_blobs(include=["snapshots", "deleted"])) @@ -1217,13 +1222,13 @@ def test_soft_delete_single_blob_snapshot(self, **kwargs): # Assert assert len(blob_list) == 3 for listedblob in blob_list: - if listedblob.snapshot == blob_snapshot_1['snapshot']: + if listedblob.snapshot == blob_snapshot_1["snapshot"]: self._assert_blob_is_soft_deleted(listedblob) else: self._assert_blob_not_soft_deleted(listedblob) # list_blobs should not list soft deleted blob snapshots if Include(deleted=True) is not specified - blob_list = list(container.list_blobs(include='snapshots')) + blob_list = list(container.list_blobs(include="snapshots")) # Assert assert len(blob_list) == 2 @@ -1250,16 +1255,16 @@ def test_soft_delete_only_snapshots_of_blob(self, **kwargs): blob_snapshot_2 = blob.create_snapshot() # Soft delete all snapshots - blob.delete_blob(delete_snapshots='only') + blob.delete_blob(delete_snapshots="only") container = self.bsc.get_container_client(self.container_name) blob_list = list(container.list_blobs(include=["snapshots", "deleted"])) # Assert assert len(blob_list) == 3 for listedblob in blob_list: - if listedblob.snapshot == blob_snapshot_1['snapshot']: + if listedblob.snapshot == blob_snapshot_1["snapshot"]: self._assert_blob_is_soft_deleted(listedblob) - elif listedblob.snapshot == blob_snapshot_2['snapshot']: + elif listedblob.snapshot == blob_snapshot_2["snapshot"]: self._assert_blob_is_soft_deleted(listedblob) else: self._assert_blob_not_soft_deleted(listedblob) @@ -1292,7 +1297,7 @@ def test_soft_delete_blob_including_all_snapshots(self, **kwargs): blob_snapshot_2 = blob.create_snapshot() # Soft delete blob and all snapshots - blob.delete_blob(delete_snapshots='include') + blob.delete_blob(delete_snapshots="include") container = self.bsc.get_container_client(self.container_name) blob_list = list(container.list_blobs(include=["snapshots", "deleted"])) @@ -1325,7 +1330,7 @@ def test_soft_delete_with_leased_blob(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Soft delete the blob without lease_id should fail with pytest.raises(HttpResponseError): @@ -1367,16 +1372,20 @@ def test_start_copy_from_url_with_oauth(self, **kwargs): source_blob_client = self._create_source_blob(data=source_blob_data) # Create destination blob destination_blob_client = self._create_blob() - token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token) + token = "Bearer {}".format( + self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token + ) with pytest.raises(HttpResponseError): destination_blob_client.start_copy_from_url(source_blob_client.url, requires_sync=True) with pytest.raises(ValueError): destination_blob_client.start_copy_from_url( - source_blob_client.url, source_authorization=token, requires_sync=False) + source_blob_client.url, source_authorization=token, requires_sync=False + ) destination_blob_client.start_copy_from_url( - source_blob_client.url, source_authorization=token, requires_sync=True) + source_blob_client.url, 
source_authorization=token, requires_sync=True + ) destination_blob_data = destination_blob_client.download_blob().readall() assert source_blob_data == destination_blob_data @@ -1391,17 +1400,18 @@ def test_copy_blob_with_existing_blob(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = copyblob.start_copy_from_url(sourceblob) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = copyblob.download_blob().readall() assert copy_content == self.byte_data @@ -1416,42 +1426,48 @@ def test_copy_blob_with_immutability_policy(self, **kwargs): self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) blob_name = self._create_block_blob() # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(container_name, 'blob1copy') - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - copy = copyblob.start_copy_from_url(sourceblob, immutability_policy=immutability_policy, - legal_hold=True) + copyblob = self.bsc.get_blob_client(container_name, "blob1copy") + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + copy = copyblob.start_copy_from_url(sourceblob, immutability_policy=immutability_policy, legal_hold=True) download_resp = copyblob.download_blob() assert download_resp.readall() == self.byte_data - assert download_resp.properties['has_legal_hold'] - assert 
download_resp.properties['immutability_policy']['expiry_time'] is not None - assert download_resp.properties['immutability_policy']['policy_mode'] is not None + assert download_resp.properties["has_legal_hold"] + assert download_resp.properties["immutability_policy"]["expiry_time"] is not None + assert download_resp.properties["immutability_policy"]["policy_mode"] is not None assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) if self.is_live: copyblob.delete_immutability_policy() copyblob.set_legal_hold(False) copyblob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -1468,10 +1484,11 @@ def test_async_copy_blob_with_if_tags(self, **kwargs): tags1 = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copyblob.upload_blob("abc", overwrite=True) copyblob.set_blob_tags(tags=tags1) @@ -1492,13 +1509,15 @@ def test_async_copy_blob_with_if_tags(self, **kwargs): with pytest.raises(ResourceModifiedError): copyblob.start_copy_from_url(sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='abc'") - copy = copyblob.start_copy_from_url(sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='first tag'") + copy = copyblob.start_copy_from_url( + sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='first tag'" + ) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None with pytest.raises(ResourceModifiedError): copyblob.download_blob(if_tags_match_condition="\"tag1\"='abc1'").readall() @@ -1516,18 +1535,19 @@ def test_copy_blob_returns_vid(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = copyblob.start_copy_from_url(sourceblob) # Assert assert copy is not None - assert copy['version_id'] is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["version_id"] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = copyblob.download_blob().readall() assert copy_content == self.byte_data @@ -1544,10 +1564,11 @@ def 
test_copy_blob_with_blob_tier_specified(self, **kwargs): self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") blob_tier = StandardBlobTier.Cool copyblob.start_copy_from_url(sourceblob, standard_blob_tier=blob_tier) @@ -1567,24 +1588,25 @@ def test_copy_blob_with_rehydrate_priority(self, **kwargs): blob_name = self._create_block_blob() # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) blob_tier = StandardBlobTier.Archive rehydrate_priority = RehydratePriority.high - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') - copy = copyblob.start_copy_from_url(sourceblob, - standard_blob_tier=blob_tier, - rehydrate_priority=rehydrate_priority) + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") + copy = copyblob.start_copy_from_url( + sourceblob, standard_blob_tier=blob_tier, rehydrate_priority=rehydrate_priority + ) copy_blob_properties = copyblob.get_blob_properties() copyblob.set_standard_blob_tier(StandardBlobTier.Hot) second_resp = copyblob.get_blob_properties() # Assert assert copy is not None - assert copy.get('copy_id') is not None + assert copy.get("copy_id") is not None assert copy_blob_properties.blob_tier == blob_tier - assert second_resp.archive_status == 'rehydrate-pending-to-hot' + assert second_resp.archive_status == "rehydrate-pending-to-hot" @BlobPreparer() @recorded_by_proxy @@ -1600,7 +1622,7 @@ def test_copy_blob_async_private_blob_no_sas(self, **kwargs): source_blob = self._create_remote_block_blob() # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) # Assert @@ -1616,7 +1638,7 @@ def test_copy_blob_async_private_blob_with_sas(self, **kwargs): secondary_storage_account_key = kwargs.pop("secondary_storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'12345678' * 1024 + data = b"12345678" * 1024 self._setup_remote(secondary_storage_account_name, secondary_storage_account_key) self._create_remote_container() source_blob = self._create_remote_block_blob(blob_data=data) @@ -1633,13 +1655,13 @@ def test_copy_blob_async_private_blob_with_sas(self, **kwargs): blob = BlobClient.from_blob_url(source_blob.url, credential=sas_token) # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) copy_resp = target_blob.start_copy_from_url(blob.url) # Assert props = self._wait_for_async_copy(target_blob) - assert props.copy.status == 'success' + assert props.copy.status == "success" actual_data = target_blob.download_blob() assert actual_data.readall() == data @@ -1651,21 +1673,21 @@ def test_abort_copy_blob(self, **kwargs): self._setup(storage_account_name, storage_account_key) source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = self.bsc.get_blob_client(self.container_name, '59466-0.txt') + copied_blob = 
self.bsc.get_blob_client(self.container_name, "59466-0.txt") # Act copy = copied_blob.start_copy_from_url(source_blob) - assert copy['copy_status'] == 'pending' + assert copy["copy_status"] == "pending" try: copied_blob.abort_copy(copy) props = self._wait_for_async_copy(copied_blob) - assert props.copy.status == 'aborted' + assert props.copy.status == "aborted" # Assert actual_data = copied_blob.download_blob() assert actual_data.readall() == b"" - assert actual_data.properties.copy.status == 'aborted' + assert actual_data.properties.copy.status == "aborted" # In the Live test pipeline, the copy occasionally finishes before it can be aborted. # Catch and assert on error code to prevent this test from failing. @@ -1683,7 +1705,7 @@ def test_abort_copy_blob_with_synchronous_copy_fails(self, **kwargs): source_blob = self.bsc.get_blob_client(self.container_name, source_blob_name) # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) copy_resp = target_blob.start_copy_from_url(source_blob.url) @@ -1691,7 +1713,7 @@ def test_abort_copy_blob_with_synchronous_copy_fails(self, **kwargs): target_blob.abort_copy(copy_resp) # Assert - assert copy_resp['copy_status'] == 'success' + assert copy_resp["copy_status"] == "success" @BlobPreparer() @recorded_by_proxy @@ -1708,7 +1730,7 @@ def test_snapshot_blob(self, **kwargs): # Assert assert resp is not None - assert resp['snapshot'] is not None + assert resp["snapshot"] is not None @BlobPreparer() @recorded_by_proxy @@ -1721,9 +1743,9 @@ def test_lease_blob_acquire_and_release(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") lease.release() - lease2 = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease2 = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Assert assert lease is not None @@ -1740,13 +1762,13 @@ def test_lease_blob_with_duration(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) - resp = blob.upload_blob(b'hello 2', length=7, lease=lease) + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) + resp = blob.upload_blob(b"hello 2", length=7, lease=lease) self.sleep(20) # Assert with pytest.raises(HttpResponseError): - blob.upload_blob(b'hello 3', length=7, lease=lease) + blob.upload_blob(b"hello 3", length=7, lease=lease) @BlobPreparer() @recorded_by_proxy @@ -1759,7 +1781,7 @@ def test_lease_blob_with_proposed_lease_id(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0' + lease_id = "a0e6c241-96ea-45a3-a44b-6ae868bc14d0" lease = blob.acquire_lease(lease_id=lease_id) # Assert @@ -1776,8 +1798,8 @@ def test_lease_blob_change_lease_id(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0' - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = "a0e6c241-96ea-45a3-a44b-6ae868bc14d0" + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") first_lease_id = lease.id lease.change(lease_id) lease.renew() @@ -1797,19 +1819,19 @@ 
def test_lease_blob_break_period(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) lease_time = lease.break_lease(lease_break_period=5) - resp = blob.upload_blob(b'hello 2', length=7, lease=lease) + resp = blob.upload_blob(b"hello 2", length=7, lease=lease) self.sleep(5) with pytest.raises(HttpResponseError): - blob.upload_blob(b'hello 3', length=7, lease=lease) + blob.upload_blob(b"hello 3", length=7, lease=lease) # Assert assert lease.id is not None assert lease_time is not None - assert resp.get('etag') is not None + assert resp.get("etag") is not None @BlobPreparer() @recorded_by_proxy @@ -1822,7 +1844,7 @@ def test_lease_blob_acquire_and_renew(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") first_id = lease.id lease.renew() @@ -1838,11 +1860,11 @@ def test_lease_blob_acquire_twice_fails(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act with pytest.raises(HttpResponseError): - blob.acquire_lease(lease_id='00000000-1111-2222-3333-555555555555') + blob.acquire_lease(lease_id="00000000-1111-2222-3333-555555555555") # Assert assert lease.id is not None @@ -1854,15 +1876,15 @@ def test_unicode_get_blob_unicode_name(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - blob_name = '啊齄丂狛狜' + blob_name = "啊齄丂狛狜" blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(b'hello world') + blob.upload_blob(b"hello world") # Act data = blob.download_blob() # Assert - assert data.readall() == b'hello world' + assert data.readall() == b"hello world" @BlobPreparer() @recorded_by_proxy @@ -1875,11 +1897,11 @@ def test_create_blob_blob_unicode_data(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - data = u'hello world啊齄丂狛狜' + data = "hello world啊齄丂狛狜" resp = blob.upload_blob(data) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -1904,7 +1926,7 @@ def test_sas_access_blob(self, **kwargs): # Act service = BlobClient.from_blob_url(blob.url, credential=token) - #self._set_test_proxy(service, self.settings) + # self._set_test_proxy(service, self.settings) content = service.download_blob().readall() # Assert @@ -1922,9 +1944,18 @@ def test_sas_access_blob_snapshot(self, **kwargs): blob_snapshot = blob_client.create_snapshot() blob_snapshot_client = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob_snapshot) - permission = BlobSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - permanent_delete=True, list=True, add=True, create=True, update=True) - assert 'y' in str(permission) + permission = BlobSasPermissions( + read=True, + write=True, + delete=True, + delete_previous_version=True, + permanent_delete=True, + list=True, + add=True, + 
create=True, + update=True, + ) + assert "y" in str(permission) token = self.generate_sas( generate_blob_sas, blob_snapshot_client.account_name, @@ -1963,14 +1994,14 @@ def test_sas_signed_identifier(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob = self.bsc.get_blob_client(self.container_name, blob_name) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow() - timedelta(hours=1)) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + start = self.get_datetime_variable(variables, "start", datetime.utcnow() - timedelta(hours=1)) + expiry = self.get_datetime_variable(variables, "expiry", datetime.utcnow() + timedelta(hours=1)) access_policy = AccessPolicy() access_policy.start = start access_policy.expiry = expiry access_policy.permission = BlobSasPermissions(read=True) - identifiers = {'testid': access_policy} + identifiers = {"testid": access_policy} container.set_container_access_policy(identifiers) @@ -1981,11 +2012,12 @@ def test_sas_signed_identifier(self, **kwargs): blob.blob_name, snapshot=blob.snapshot, account_key=blob.credential.account_key, - policy_id='testid') + policy_id="testid", + ) # Act service = BlobClient.from_blob_url(blob.url, credential=token) - #self._set_test_proxy(service, self.settings) + # self._set_test_proxy(service, self.settings) result = service.download_blob().readall() # Assert @@ -2012,10 +2044,8 @@ def test_account_sas(self, **kwargs): ) # Act - blob = BlobClient( - self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=token) - container = ContainerClient( - self.bsc.url, container_name=self.container_name, credential=token) + blob = BlobClient(self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=token) + container = ContainerClient(self.bsc.url, container_name=self.container_name, credential=token) container_props = container.get_container_properties() blob_props = blob.get_blob_properties() @@ -2042,9 +2072,19 @@ def test_blob_service_sas(self, **kwargs): container.container_name, account_key=container.credential.account_key, permission=ContainerSasPermissions( - read=True, write=True, delete=True, list=True, delete_previous_version=True, - tag=True, add=True, create=True, permanent_delete=True, filter_by_tags=True, move=True, - execute=True, set_immutability_policy=True + read=True, + write=True, + delete=True, + list=True, + delete_previous_version=True, + tag=True, + add=True, + create=True, + permanent_delete=True, + filter_by_tags=True, + move=True, + execute=True, + set_immutability_policy=True, ), expiry=datetime.utcnow() + timedelta(hours=1), ) @@ -2057,8 +2097,17 @@ def test_blob_service_sas(self, **kwargs): snapshot=blob.snapshot, account_key=blob.credential.account_key, permission=BlobSasPermissions( - read=True, add=True, create=True, write=True, delete=True, delete_previous_version=True, - permanent_delete=True, tag=True, move=True, execute=True, set_immutability_policy=True + read=True, + add=True, + create=True, + write=True, + delete=True, + delete_previous_version=True, + permanent_delete=True, + tag=True, + move=True, + execute=True, + set_immutability_policy=True, ), expiry=datetime.utcnow() + timedelta(hours=1), ) @@ -2087,11 +2136,11 @@ def test_multiple_services_sas(self, **kwargs): ResourceTypes(container=True, object=True, service=True), AccountSasPermissions(read=True, list=True), datetime.utcnow() + timedelta(hours=1), - services=Services(blob=True, fileshare=True) + 
services=Services(blob=True, fileshare=True), ) # Assert - assert 'ss=bf' in token + assert "ss=bf" in token @pytest.mark.live_test_only @BlobPreparer() @@ -2103,16 +2152,19 @@ def test_set_immutability_policy_using_sas(self, **kwargs): self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob_client = self.bsc.get_blob_client(container_name, blob_name) blob_client.upload_blob(b"abc", overwrite=True) @@ -2126,17 +2178,19 @@ def test_set_immutability_policy_using_sas(self, **kwargs): datetime.utcnow() + timedelta(hours=1), ) blob = BlobClient( - self.bsc.url, container_name= container_name, blob_name=blob_name, credential=account_sas_token) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) + self.bsc.url, container_name=container_name, blob_name=blob_name, credential=account_sas_token + ) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) resp_with_account_sas = blob.set_immutability_policy(immutability_policy=immutability_policy) blob_response = requests.get(blob.url) # Assert response using account sas assert blob_response.ok - assert resp_with_account_sas['immutability_policy_until_date'] is not None - assert resp_with_account_sas['immutability_policy_mode'] is not None + assert resp_with_account_sas["immutability_policy_until_date"] is not None + assert resp_with_account_sas["immutability_policy_mode"] is not None # Acting using container sas container_sas_token = self.generate_sas( @@ -2148,15 +2202,17 @@ def test_set_immutability_policy_using_sas(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), ) blob1 = BlobClient( - self.bsc.url, container_name=container_name, blob_name=blob_name, credential=container_sas_token) + self.bsc.url, container_name=container_name, blob_name=blob_name, credential=container_sas_token + ) - expiry_time2 = self.get_datetime_variable(variables, 'expiry_time2', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time2, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) + expiry_time2 = self.get_datetime_variable(variables, "expiry_time2", datetime.utcnow() + 
timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time2, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) resp_with_container_sas = blob1.set_immutability_policy(immutability_policy=immutability_policy) # Assert response using container sas - assert resp_with_container_sas['immutability_policy_until_date'] is not None - assert resp_with_container_sas['immutability_policy_mode'] is not None + assert resp_with_container_sas["immutability_policy_until_date"] is not None + assert resp_with_container_sas["immutability_policy_mode"] is not None # Acting using blob sas blob_sas_token = self.generate_sas( @@ -2168,23 +2224,25 @@ def test_set_immutability_policy_using_sas(self, **kwargs): permission=BlobSasPermissions(read=True, set_immutability_policy=True), expiry=datetime.utcnow() + timedelta(hours=1), ) - blob2 = BlobClient( - self.bsc.url, container_name=container_name, blob_name=blob_name, credential=blob_sas_token) + blob2 = BlobClient(self.bsc.url, container_name=container_name, blob_name=blob_name, credential=blob_sas_token) - expiry_time3 = self.get_datetime_variable(variables, 'expiry_time3', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time3, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) + expiry_time3 = self.get_datetime_variable(variables, "expiry_time3", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time3, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) resp_with_blob_sas = blob2.set_immutability_policy(immutability_policy=immutability_policy) # Assert response using blob sas - assert resp_with_blob_sas['immutability_policy_until_date'] is not None - assert resp_with_blob_sas['immutability_policy_mode'] is not None + assert resp_with_blob_sas["immutability_policy_until_date"] is not None + assert resp_with_blob_sas["immutability_policy_mode"] is not None if self.is_live: blob_client.delete_immutability_policy() blob_client.set_legal_hold(False) blob_client.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -2197,9 +2255,10 @@ def test_account_sas_credential(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() - account_sas_permission = AccountSasPermissions(read=True, write=True, delete=True, add=True, - permanent_delete=True, list=True) - assert 'y' in str(account_sas_permission) + account_sas_permission = AccountSasPermissions( + read=True, write=True, delete=True, add=True, permanent_delete=True, list=True + ) + assert "y" in str(account_sas_permission) token = self.generate_sas( generate_account_sas, @@ -2212,9 +2271,11 @@ def test_account_sas_credential(self, **kwargs): # Act blob = BlobClient( - self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=AzureSasCredential(token)) + self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=AzureSasCredential(token) + ) container = ContainerClient( - self.bsc.url, container_name=self.container_name, credential=AzureSasCredential(token)) + self.bsc.url, container_name=self.container_name, credential=AzureSasCredential(token) + ) blob_properties = blob.get_blob_properties() container_properties = container.get_container_properties() @@ -2253,8 +2314,8 
@@ def test_get_user_delegation_key(self, **kwargs): # Action 1: make sure token works service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + start = self.get_datetime_variable(variables, "start", datetime.utcnow()) + expiry = self.get_datetime_variable(variables, "expiry", datetime.utcnow() + timedelta(hours=1)) user_delegation_key_1 = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) user_delegation_key_2 = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) @@ -2288,12 +2349,12 @@ def test_user_delegation_sas_for_blob(self, **kwargs): token_credential = self.get_credential(BlobServiceClient) service_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + start = self.get_datetime_variable(variables, "start", datetime.utcnow()) + expiry = self.get_datetime_variable(variables, "expiry", datetime.utcnow() + timedelta(hours=1)) user_delegation_key = service_client.get_user_delegation_key(start, expiry) - container_client = service_client.create_container(self.get_resource_name('oauthcontainer')) - blob_client = container_client.get_blob_client(self.get_resource_name('oauthblob')) + container_client = service_client.create_container(self.get_resource_name("oauthcontainer")) + blob_client = container_client.get_blob_client(self.get_resource_name("oauthblob")) blob_client.upload_blob(byte_data, length=len(byte_data)) token = self.generate_sas( @@ -2350,7 +2411,7 @@ def test_token_credential_blob(self, **kwargs): # Setup container_name = self._get_container_reference() blob_name = self._get_blob_reference() - blob_data = b'Helloworld' + blob_data = b"Helloworld" token_credential = self.get_credential(BlobServiceClient) service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) @@ -2381,9 +2442,9 @@ def test_token_credential_with_batch_operation(self, **kwargs): container = service.get_container_client(container_name) try: container.create_container() - container.upload_blob(blob_name + '1', b'HelloWorld') - container.upload_blob(blob_name + '2', b'HelloWorld') - container.upload_blob(blob_name + '3', b'HelloWorld') + container.upload_blob(blob_name + "1", b"HelloWorld") + container.upload_blob(blob_name + "2", b"HelloWorld") + container.upload_blob(blob_name + "3", b"HelloWorld") delete_batch = [] blob_list = container.list_blobs(name_starts_with=blob_name) @@ -2443,11 +2504,11 @@ def test_shared_read_access_blob_with_content_query_params(self, **kwargs): account_key=blob.credential.account_key, permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), - cache_control='no-cache', - content_disposition='inline', - content_encoding='utf-8', - content_language='fr', - content_type='text', + cache_control="no-cache", + content_disposition="inline", + content_encoding="utf-8", + content_language="fr", + content_type="text", ) sas_blob = BlobClient.from_blob_url(blob.url, credential=token) @@ -2457,11 +2518,11 @@ def test_shared_read_access_blob_with_content_query_params(self, **kwargs): # Assert response.raise_for_status() 
assert self.byte_data == response.content - assert response.headers['cache-control'] == 'no-cache' - assert response.headers['content-disposition'] == 'inline' - assert response.headers['content-encoding'] == 'utf-8' - assert response.headers['content-language'] == 'fr' - assert response.headers['content-type'] == 'text' + assert response.headers["cache-control"] == "no-cache" + assert response.headers["content-disposition"] == "inline" + assert response.headers["content-encoding"] == "utf-8" + assert response.headers["content-language"] == "fr" + assert response.headers["content-type"] == "text" @pytest.mark.live_test_only @BlobPreparer() @@ -2470,7 +2531,7 @@ def test_shared_write_access_blob(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - updated_data = b'updated blob data' + updated_data = b"updated blob data" blob_name = self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -2487,7 +2548,7 @@ def test_shared_write_access_blob(self, **kwargs): sas_blob = BlobClient.from_blob_url(blob.url, credential=token) # Act - headers = {'x-ms-blob-type': 'BlockBlob'} + headers = {"x-ms-blob-type": "BlockBlob"} response = requests.put(sas_blob.url, headers=headers, data=updated_data) # Assert @@ -2542,15 +2603,15 @@ def test_get_account_information(self, **kwargs): bc_info = blob_client.get_account_information() # Assert - assert bsc_info.get('sku_name') is not None - assert bsc_info.get('account_kind') is not None - assert not bsc_info.get('is_hns_enabled') - assert cc_info.get('sku_name') is not None - assert cc_info.get('account_kind') is not None - assert not cc_info.get('is_hns_enabled') - assert bc_info.get('sku_name') is not None - assert bc_info.get('account_kind') is not None - assert not bc_info.get('is_hns_enabled') + assert bsc_info.get("sku_name") is not None + assert bsc_info.get("account_kind") is not None + assert not bsc_info.get("is_hns_enabled") + assert cc_info.get("sku_name") is not None + assert cc_info.get("account_kind") is not None + assert not cc_info.get("is_hns_enabled") + assert bc_info.get("sku_name") is not None + assert bc_info.get("account_kind") is not None + assert not bc_info.get("is_hns_enabled") @BlobPreparer() @recorded_by_proxy @@ -2588,32 +2649,30 @@ def test_get_account_information_sas(self, **kwargs): ) # Act - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=account_token) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=account_token) bsc_info = bsc.get_account_information() container_client = ContainerClient( - self.account_url(storage_account_name, "blob"), - self.container_name, - credential=container_token) + self.account_url(storage_account_name, "blob"), self.container_name, credential=container_token + ) cc_info = container_client.get_account_information() blob_client = BlobClient( self.account_url(storage_account_name, "blob"), self.container_name, self._get_blob_reference(), - credential=blob_token) + credential=blob_token, + ) bc_info = blob_client.get_account_information() # Assert - assert bsc_info.get('sku_name') is not None - assert bsc_info.get('account_kind') is not None - assert not bsc_info.get('is_hns_enabled') - assert cc_info.get('sku_name') is not None - assert cc_info.get('account_kind') is not None - assert not cc_info.get('is_hns_enabled') - assert bc_info.get('sku_name') is not None - assert bc_info.get('account_kind') is not None - 
assert not bc_info.get('is_hns_enabled') + assert bsc_info.get("sku_name") is not None + assert bsc_info.get("account_kind") is not None + assert not bsc_info.get("is_hns_enabled") + assert cc_info.get("sku_name") is not None + assert cc_info.get("account_kind") is not None + assert not cc_info.get("is_hns_enabled") + assert bc_info.get("sku_name") is not None + assert bc_info.get("account_kind") is not None + assert not bc_info.get("is_hns_enabled") @BlobPreparer() @recorded_by_proxy @@ -2628,8 +2687,8 @@ def test_get_account_information_with_container_name(self, **kwargs): info = container.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @BlobPreparer() @recorded_by_proxy @@ -2644,8 +2703,8 @@ def test_get_account_information_with_blob_name(self, **kwargs): info = blob.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2655,10 +2714,17 @@ def test_get_account_information_with_container_sas(self, **kwargs): self._setup(storage_account_name, storage_account_key) container = self.bsc.get_container_client(self.container_name) - permission = ContainerSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - list=True, tag=True, set_immutability_policy=True, - permanent_delete=True) - assert 'y' in str(permission) + permission = ContainerSasPermissions( + read=True, + write=True, + delete=True, + delete_previous_version=True, + list=True, + tag=True, + set_immutability_policy=True, + permanent_delete=True, + ) + assert "y" in str(permission) token = self.generate_sas( generate_container_sas, container.account_name, @@ -2673,8 +2739,8 @@ def test_get_account_information_with_container_sas(self, **kwargs): info = sas_container.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2702,8 +2768,8 @@ def test_get_account_information_with_blob_sas(self, **kwargs): info = sas_blob.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2712,7 +2778,7 @@ def test_download_to_file_with_sas(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = self._create_blob(data=data) sas_token = self.generate_sas( @@ -2742,7 +2808,7 @@ def test_download_to_file_with_credential(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = self._create_blob(data=data) # Act @@ -2760,7 +2826,7 @@ def test_download_to_stream_with_credential(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = self._create_blob(data=data) # Act @@ -2778,7 
@@ -2778,7 +2844,7 @@ def test_download_to_file_with_existing_file(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         source_blob = self._create_blob(data=data)

         # Act
@@ -2803,23 +2869,19 @@ def test_download_to_file_with_existing_file_overwrite(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         source_blob = self._create_blob(data=data)

-        file_path = 'file_with_existing_file_overwrite.temp.{}.dat'.format(str(uuid.uuid4()))
+        file_path = "file_with_existing_file_overwrite.temp.{}.dat".format(str(uuid.uuid4()))

         # Act
-        download_blob_from_url(
-            source_blob.url, file_path,
-            credential=storage_account_key)
+        download_blob_from_url(source_blob.url, file_path, credential=storage_account_key)

-        data2 = b'ABC' * 1024
+        data2 = b"ABC" * 1024
         source_blob = self._create_blob(data=data2)
-        download_blob_from_url(
-            source_blob.url, file_path, overwrite=True,
-            credential=storage_account_key)
+        download_blob_from_url(source_blob.url, file_path, overwrite=True, credential=storage_account_key)

         # Assert
-        with open(file_path, 'rb') as stream:
+        with open(file_path, "rb") as stream:
             actual = stream.read()
             assert data2 == actual
         self._teardown(file_path)
@@ -2831,7 +2893,7 @@ def test_upload_to_url_bytes_with_sas(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
@@ -2862,13 +2924,12 @@ def test_upload_to_url_bytes_with_credential(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)

         # Act
-        uploaded = upload_blob_to_url(
-            blob.url, data, credential=storage_account_key)
+        uploaded = upload_blob_to_url(blob.url, data, credential=storage_account_key)

         # Assert
         assert uploaded is not None
@@ -2882,15 +2943,14 @@ def test_upload_to_url_bytes_with_existing_blob(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         blob.upload_blob(b"existing_data")

         # Act
         with pytest.raises(ResourceExistsError):
-            upload_blob_to_url(
-                blob.url, data, credential=storage_account_key)
+            upload_blob_to_url(blob.url, data, credential=storage_account_key)

         # Assert
         content = blob.download_blob().readall()
@@ -2903,16 +2963,13 @@ def test_upload_to_url_bytes_with_existing_blob_overwrite(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         blob.upload_blob(b"existing_data")

         # Act
-        uploaded = upload_blob_to_url(
-            blob.url, data,
-            overwrite=True,
-            credential=storage_account_key)
+        uploaded = upload_blob_to_url(blob.url, data, overwrite=True, credential=storage_account_key)

         # Assert
         assert uploaded is not None
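The upload counterpart behaves symmetrically, as the two existing-blob tests above show. A sketch with a placeholder URL and key:

from azure.storage.blob import upload_blob_to_url

blob_url = "https://myaccount.blob.core.windows.net/mycontainer/myblob"  # placeholder
# Without overwrite=True this raises ResourceExistsError if the blob exists,
# which is exactly what the first of the two tests above asserts.
upload_blob_to_url(blob_url, b"123" * 1024, credential="<account-key>", overwrite=True)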
@@ -2926,18 +2983,17 @@ def test_upload_to_url_text_with_credential(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = '123' * 1024
+        data = "123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)

         # Act
-        uploaded = upload_blob_to_url(
-            blob.url, data, credential=storage_account_key)
+        uploaded = upload_blob_to_url(blob.url, data, credential=storage_account_key)

         # Assert
         assert uploaded is not None

-        stream = blob.download_blob(encoding='UTF-8')
+        stream = blob.download_blob(encoding="UTF-8")
         content = stream.readall()
         assert data == content
@@ -2948,7 +3004,7 @@ def test_upload_to_url_file_with_credential(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         self._setup(storage_account_name, storage_account_key)
-        data = b'123' * 1024
+        data = b"123" * 1024
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
@@ -2966,17 +3022,17 @@ def test_upload_to_url_file_with_credential(self, **kwargs):
     def test_set_blob_permission_from_string(self):
         # Arrange
         permission1 = BlobSasPermissions(read=True, write=True)
-        permission2 = BlobSasPermissions.from_string('wr')
+        permission2 = BlobSasPermissions.from_string("wr")
         assert permission1.read == permission2.read
         assert permission1.write == permission2.write

     def test_set_blob_permission(self):
         # Arrange
-        permission = BlobSasPermissions.from_string('wrdx')
+        permission = BlobSasPermissions.from_string("wrdx")
         assert permission.read == True
         assert permission.delete == True
         assert permission.write == True
-        assert permission._str == 'rwdx'
+        assert permission._str == "rwdx"

     @BlobPreparer()
     @recorded_by_proxy
@@ -2984,9 +3040,11 @@ def test_transport_closed_only_once(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")

-        container_name = self.get_resource_name('utcontainersync')
+        container_name = self.get_resource_name("utcontainersync")
         transport = RequestsTransport()
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, transport=transport)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, transport=transport
+        )
         blob_name = self._get_blob_reference()
         with bsc:
             bsc.get_service_properties()
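For reference, a sketch of the permission-string round trip the two permission tests above rely on; the letter-to-flag mapping shown in the comments is an assumption inferred from the asserted "rwdx" ordering:

from azure.storage.blob import BlobSasPermissions

# Assumed mapping: r=read, w=write, d=delete, x=delete_previous_version.
p1 = BlobSasPermissions(read=True, write=True, delete=True, delete_previous_version=True)
p2 = BlobSasPermissions.from_string("wrdx")
# from_string() accepts the letters in any order; the canonical order is "rwdx".
assert p1.read == p2.read and p1.write == p2.write and p1.delete == p2.delete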
@@ -3008,21 +3066,23 @@ def test_set_blob_tier_for_a_version(self, **kwargs):
         data_for_the_first_version = "abc"
         data_for_the_second_version = "efgefgefg"
         resp = blob.upload_blob(data_for_the_first_version, overwrite=True)
-        assert resp['version_id'] is not None
+        assert resp["version_id"] is not None
         blob.upload_blob(data_for_the_second_version, overwrite=True)
         blob.set_standard_blob_tier(StandardBlobTier.Cool)
-        blob.set_standard_blob_tier(StandardBlobTier.Cool, rehydrate_priority=RehydratePriority.high, version_id=resp['version_id'])
-        blob.set_standard_blob_tier(StandardBlobTier.Hot, version_id=resp['version_id'])
+        blob.set_standard_blob_tier(
+            StandardBlobTier.Cool, rehydrate_priority=RehydratePriority.high, version_id=resp["version_id"]
+        )
+        blob.set_standard_blob_tier(StandardBlobTier.Hot, version_id=resp["version_id"])

         # Act
-        props = blob.get_blob_properties(version_id=resp['version_id'])
+        props = blob.get_blob_properties(version_id=resp["version_id"])
         origin_props = blob.get_blob_properties()

         # Assert
         assert isinstance(props, BlobProperties)
         assert props.blob_type == BlobType.BlockBlob
         assert props.size == len(data_for_the_first_version)
-        assert props.blob_tier == 'Hot'
-        assert origin_props.blob_tier == 'Cool'
+        assert props.blob_tier == "Hot"
+        assert origin_props.blob_tier == "Cool"

     @BlobPreparer()
     @recorded_by_proxy
@@ -3031,11 +3091,14 @@ def test_access_token_refresh_after_retry(self, **kwargs):

         def fail_response(response):
             response.http_response.status_code = 408
+
         token_credential = FakeTokenCredential()
         retry = LinearRetry(backoff=2, random_jitter_range=1, retry_total=4)
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential, retry_policy=retry)
-        self.container_name = self.get_resource_name('retrytest')
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=token_credential, retry_policy=retry
+        )
+        self.container_name = self.get_resource_name("retrytest")
         container = bsc.get_container_client(self.container_name)
         with pytest.raises(Exception):
             container.create_container(raw_response_hook=fail_response)
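The version-tiering test above shows that set_standard_blob_tier accepts a version_id so an older version can be re-tiered independently of the current one. A minimal sketch, assuming a versioning-enabled account and a placeholder connection string:

from azure.storage.blob import BlobClient, StandardBlobTier

blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "myblob")
old = blob.upload_blob(b"v1", overwrite=True)   # response carries the new version_id
blob.upload_blob(b"v2", overwrite=True)         # becomes the current version
# Re-tier only the older version; the current version keeps its own tier.
blob.set_standard_blob_tier(StandardBlobTier.Cool, version_id=old["version_id"])
props = blob.get_blob_properties(version_id=old["version_id"])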
props["immutability_policy"]["policy_mode"] is not None + assert props["immutability_policy"]["policy_mode"] == "unlocked" # check immutability policy after delete_immutability_policy() blob.delete_immutability_policy() props = blob.get_blob_properties() - assert props['immutability_policy']['policy_mode'] is None - assert props['immutability_policy']['policy_mode'] is None + assert props["immutability_policy"]["policy_mode"] is None + assert props["immutability_policy"]["policy_mode"] is None if self.is_live: blob.delete_immutability_policy() blob.set_legal_hold(False) blob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -3103,17 +3172,20 @@ def test_blob_legal_hold(self, **kwargs): self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = self.bsc.get_blob_client(container_name, blob_name) blob.upload_blob(b"abc", overwrite=True) resp = blob.set_legal_hold(True) @@ -3122,20 +3194,22 @@ def test_blob_legal_hold(self, **kwargs): with pytest.raises(HttpResponseError): blob.delete_blob() - assert resp['legal_hold'] - assert props['has_legal_hold'] + assert resp["legal_hold"] + assert props["has_legal_hold"] resp2 = blob.set_legal_hold(False) props2 = blob.get_blob_properties() - assert not resp2['legal_hold'] - assert not props2['has_legal_hold'] + assert not resp2["legal_hold"] + assert not props2["has_legal_hold"] if self.is_live: blob.delete_immutability_policy() blob.set_legal_hold(False) blob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) @BlobPreparer() @recorded_by_proxy @@ -3146,43 +3220,46 @@ def test_download_blob_with_immutability_policy(self, **kwargs): variables = kwargs.pop("variables", {}) self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, 
@@ -3146,43 +3220,46 @@ def test_download_blob_with_immutability_policy(self, **kwargs):
         variables = kwargs.pop("variables", {})

         self._setup(versioned_storage_account_name, versioned_storage_account_key)
-        container_name = self.get_resource_name('vlwcontainer')
+        container_name = self.get_resource_name("vlwcontainer")
         if self.is_live:
             token_credential = self.get_credential(BlobServiceClient)
             subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
-            mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+            mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01")
             property = mgmt_client.models().BlobContainer(
-                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
-            mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property)
+                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)
+            )
+            mgmt_client.blob_containers.create(
+                storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property
+            )

         # Act
-        blob_name = self.get_resource_name('vlwblob')
+        blob_name = self.get_resource_name("vlwblob")
         blob = self.bsc.get_blob_client(container_name, blob_name)
         content = b"abcedfg"

-        expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
-        immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time,
-                                                 policy_mode=BlobImmutabilityPolicyMode.Unlocked)
-        blob.upload_blob(content,
-                         immutability_policy=immutability_policy,
-                         legal_hold=True,
-                         overwrite=True)
+        expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5))
+        immutability_policy = ImmutabilityPolicy(
+            expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked
+        )
+        blob.upload_blob(content, immutability_policy=immutability_policy, legal_hold=True, overwrite=True)

         download_resp = blob.download_blob()

         with pytest.raises(HttpResponseError):
             blob.delete_blob()

-        assert download_resp.properties['has_legal_hold']
-        assert download_resp.properties['immutability_policy']['expiry_time'] is not None
-        assert download_resp.properties['immutability_policy']['policy_mode'] is not None
+        assert download_resp.properties["has_legal_hold"]
+        assert download_resp.properties["immutability_policy"]["expiry_time"] is not None
+        assert download_resp.properties["immutability_policy"]["policy_mode"] is not None

         # Cleanup
         if self.is_live:
             blob.delete_immutability_policy()
             blob.set_legal_hold(False)
             blob.delete_blob()
-            mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name)
+            mgmt_client.blob_containers.delete(
+                storage_resource_group_name, versioned_storage_account_name, container_name
+            )

         return variables
@@ -3195,39 +3272,43 @@ def test_list_blobs_with_immutability_policy(self, **kwargs):
         variables = kwargs.pop("variables", {})

         self._setup(versioned_storage_account_name, versioned_storage_account_key)
-        container_name = self.get_resource_name('vlwcontainer')
+        container_name = self.get_resource_name("vlwcontainer")
         if self.is_live:
             token_credential = self.get_credential(BlobServiceClient)
             subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
-            mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+            mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01")
             property = mgmt_client.models().BlobContainer(
-                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
-            mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property)
+                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)
+            )
+            mgmt_client.blob_containers.create(
+                storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property
+            )

         # Act
-        blob_name = self.get_resource_name('vlwblob')
+        blob_name = self.get_resource_name("vlwblob")
         container_client = self.bsc.get_container_client(container_name)
         blob = self.bsc.get_blob_client(container_name, blob_name)
         content = b"abcedfg"

-        expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
-        immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time,
-                                                 policy_mode=BlobImmutabilityPolicyMode.Unlocked)
-        blob.upload_blob(content,immutability_policy=immutability_policy,
-                         legal_hold=True,
-                         overwrite=True)
+        expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5))
+        immutability_policy = ImmutabilityPolicy(
+            expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked
+        )
+        blob.upload_blob(content, immutability_policy=immutability_policy, legal_hold=True, overwrite=True)

-        blob_list = list(container_client.list_blobs(include=['immutabilitypolicy', 'legalhold']))
+        blob_list = list(container_client.list_blobs(include=["immutabilitypolicy", "legalhold"]))

-        assert blob_list[0]['has_legal_hold']
-        assert blob_list[0]['immutability_policy']['expiry_time'] is not None
-        assert blob_list[0]['immutability_policy']['policy_mode'] is not None
+        assert blob_list[0]["has_legal_hold"]
+        assert blob_list[0]["immutability_policy"]["expiry_time"] is not None
+        assert blob_list[0]["immutability_policy"]["policy_mode"] is not None

         if self.is_live:
             blob.delete_immutability_policy()
             blob.set_legal_hold(False)
             blob.delete_blob()
-            mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name)
+            mgmt_client.blob_containers.delete(
+                storage_resource_group_name, versioned_storage_account_name, container_name
+            )

         return variables
@@ -3258,22 +3339,22 @@ def test_download_properties(self, **kwargs):

         self._setup(storage_account_name, storage_account_key)
         blob_name = self.get_resource_name("utcontainer")
-        blob_data = 'abc'
+        blob_data = "abc"

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         blob.upload_blob(blob_data)

         # Assert
-        data = blob.download_blob(encoding='utf-8')
+        data = blob.download_blob(encoding="utf-8")
         props = data.properties

         assert data is not None
         assert data.readall() == blob_data
-        assert props['name'] == blob_name
-        assert props['creation_time'] is not None
-        assert props['content_settings'] is not None
-        assert props['size'] == len(blob_data)
+        assert props["name"] == blob_name
+        assert props["creation_time"] is not None
+        assert props["content_settings"] is not None
+        assert props["size"] == len(blob_data)

     @BlobPreparer()
     @recorded_by_proxy
@@ -3284,7 +3365,7 @@ def test_blob_version_id_operations(self, **kwargs):
         self._setup(versioned_storage_account_name, versioned_storage_account_key)
         container = self.bsc.get_container_client(self.container_name)
         blob_name = self.get_resource_name("utcontainer")
-        blob_data = b'abc'
+        blob_data = b"abc"
         blob_client = container.get_blob_client(blob_name)
         tags_a = {"color": "red"}
         tags_b = {"color": "yellow"}
@@ -3292,31 +3373,36 @@ def test_blob_version_id_operations(self, **kwargs):
         blob_client.upload_blob(blob_data, overwrite=True)
         v1_props = blob_client.get_blob_properties()
-        v1_blob = BlobClient(self.bsc.url, container_name=self.container_name, blob_name=blob_name,
-                             version_id=v1_props['version_id'], credential=versioned_storage_account_key)
+        v1_blob = BlobClient(
+            self.bsc.url,
+            container_name=self.container_name,
+            blob_name=blob_name,
+            version_id=v1_props["version_id"],
+            credential=versioned_storage_account_key,
+        )
         blob_client.upload_blob(blob_data * 2, overwrite=True)
         v2_props = blob_client.get_blob_properties()
-        v2_blob = container.get_blob_client(v2_props, version_id=v2_props['version_id'])
+        v2_blob = container.get_blob_client(v2_props, version_id=v2_props["version_id"])
         blob_client.upload_blob(blob_data * 3, overwrite=True)
         v3_props = blob_client.get_blob_properties()

         v1_blob.set_standard_blob_tier(StandardBlobTier.Cool)
         v1_blob.set_blob_tags(tags_a)
-        v2_blob.set_standard_blob_tier(StandardBlobTier.Cool, version_id=v3_props['version_id'])
-        v1_blob.set_blob_tags(tags_c, version_id=v3_props['version_id'])
+        v2_blob.set_standard_blob_tier(StandardBlobTier.Cool, version_id=v3_props["version_id"])
+        v1_blob.set_blob_tags(tags_c, version_id=v3_props["version_id"])
         v2_blob.set_standard_blob_tier(StandardBlobTier.Hot)
         v2_blob.set_blob_tags(tags_b)

         # Assert
         assert (v1_blob.download_blob()).readall() == blob_data
         assert (v2_blob.download_blob()).readall() == blob_data * 2
-        assert (v1_blob.download_blob(version_id=v3_props['version_id'])).readall() == blob_data * 3
+        assert (v1_blob.download_blob(version_id=v3_props["version_id"])).readall() == blob_data * 3
         assert v1_blob.get_blob_tags() == tags_a
         assert v2_blob.get_blob_tags() == tags_b
-        assert v2_blob.get_blob_tags(version_id=v3_props['version_id']) == tags_c
-        v1_blob.delete_blob(version_id=v2_props['version_id'])
+        assert v2_blob.get_blob_tags(version_id=v3_props["version_id"]) == tags_c
+        v1_blob.delete_blob(version_id=v2_props["version_id"])
         assert v1_blob.exists() is True
-        assert v1_blob.exists(version_id=v2_props['version_id']) is False
+        assert v1_blob.exists(version_id=v2_props["version_id"]) is False
         assert blob_client.exists() is True

     @BlobPreparer()
@@ -3332,8 +3418,9 @@ def test_storage_account_audience_blob_service_client(self, **kwargs):
         # Act
         token_credential = self.get_credential(BlobServiceClient)
         bsc = BlobServiceClient(
-            self.account_url(storage_account_name, "blob"), credential=token_credential,
-            audience=f'https://{storage_account_name}.blob.core.windows.net'
+            self.account_url(storage_account_name, "blob"),
+            credential=token_credential,
+            audience=f"https://{storage_account_name}.blob.core.windows.net",
         )

         # Assert
@@ -3355,8 +3442,11 @@ def test_storage_account_audience_blob_client(self, **kwargs):
         # Act
         token_credential = self.get_credential(BlobClient)
         blob = BlobClient(
-            self.bsc.url, container_name=self.container_name, blob_name=blob_name,
-            credential=token_credential, audience=f'https://{storage_account_name}.blob.core.windows.net'
+            self.bsc.url,
+            container_name=self.container_name,
+            blob_name=blob_name,
+            credential=token_credential,
+            audience=f"https://{storage_account_name}.blob.core.windows.net",
         )

         # Assert
@@ -3375,11 +3465,11 @@ def test_oauth_error_handling(self, **kwargs):
         creds = ClientSecretCredential(
             "00000000-0000-0000-0000-000000000000",
             "00000000-0000-0000-0000-000000000000",
-            "00000000-0000-0000-0000-000000000000" + 'a'
+            "00000000-0000-0000-0000-000000000000" + "a",
         )

         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=creds, retry_total=0)
-        container = bsc.get_container_client('testing')
+        container = bsc.get_container_client("testing")

         # Act
         with pytest.raises(ClientAuthenticationError):
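The two audience tests above pin token requests to a single storage account. A sketch of the same construction outside the test harness (the account name is a placeholder, and DefaultAzureCredential stands in for whatever credential the caller uses):

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

account = "myaccount"  # placeholder
bsc = BlobServiceClient(
    f"https://{account}.blob.core.windows.net",
    credential=DefaultAzureCredential(),
    # Scopes token requests to this account instead of the default
    # https://storage.azure.com audience.
    audience=f"https://{account}.blob.core.windows.net",
)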
@@ -3394,7 +3484,7 @@ def test_upload_blob_partial_stream(self, **kwargs):
         # Arrange
         self._setup(storage_account_name, storage_account_key)
         blob = self.bsc.get_container_client(self.container_name).get_blob_client(self._get_blob_reference())

-        data = b'abcde' * 100
+        data = b"abcde" * 100
         stream = BytesIO(data)
         read_length = 207
@@ -3417,7 +3507,7 @@ def test_upload_blob_partial_stream_chunked(self, **kwargs):
         self.bsc._config.max_block_size = 1024
         blob = self.bsc.get_container_client(self.container_name).get_blob_client(self._get_blob_reference())

-        data = b'abcde' * 1024
+        data = b"abcde" * 1024
         stream = BytesIO(data)
         length = 3000
@@ -3428,4 +3518,4 @@ def test_upload_blob_partial_stream_chunked(self, **kwargs):
         result = blob.download_blob().readall()
         assert result == data[:length]

-    # ------------------------------------------------------------------------------
\ No newline at end of file
+    # ------------------------------------------------------------------------------
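The two partial-stream tests above rely on upload_blob honoring an explicit length that is shorter than the stream. A standalone sketch with a placeholder connection string:

from io import BytesIO
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "myblob")
data = b"abcde" * 1024
stream = BytesIO(data)
# Only `length` bytes are read from the stream; the remainder stays unconsumed.
blob.upload_blob(stream, length=3000, overwrite=True)
assert blob.download_blob().readall() == data[:3000]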
diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
index 89c4f9635e51..d9555c343566 100644
--- a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
+++ b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
@@ -21,7 +21,9 @@
     HttpResponseError,
     ResourceNotFoundError,
     ResourceExistsError,
-    ClientAuthenticationError, ResourceModifiedError)
+    ClientAuthenticationError,
+    ResourceModifiedError,
+)
 from azure.core.pipeline.transport import AioHttpTransport
 from azure.mgmt.storage.aio import StorageManagementClient
 from azure.storage.blob.aio import (
@@ -29,7 +31,8 @@
     BlobServiceClient,
     ContainerClient,
     download_blob_from_url,
-    upload_blob_to_url)
+    upload_blob_to_url,
+)
 from azure.storage.blob import (
     AccessPolicy,
     AccountSasPermissions,
@@ -48,7 +51,8 @@
     StorageErrorCode,
     generate_account_sas,
     generate_container_sas,
-    generate_blob_sas)
+    generate_blob_sas,
+)
 from devtools_testutils.fake_credentials_async import AsyncFakeCredential
 from devtools_testutils.aio import recorded_by_proxy_async
 from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase
@@ -56,8 +60,8 @@
 from test_helpers_async import AsyncStream

 # ------------------------------------------------------------------------------
-TEST_CONTAINER_PREFIX = 'container'
-TEST_BLOB_PREFIX = 'blob'
+TEST_CONTAINER_PREFIX = "container"
+TEST_BLOB_PREFIX = "blob"
 # ------------------------------------------------------------------------------
@@ -65,8 +69,8 @@ class TestStorageCommonBlobAsync(AsyncStorageRecordedTestCase):
     # --Helpers-----------------------------------------------------------------
     async def _setup(self, storage_account_name, key):
         self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key)
-        self.container_name = self.get_resource_name('utcontainer')
-        self.source_container_name = self.get_resource_name('utcontainersource')
+        self.container_name = self.get_resource_name("utcontainer")
+        self.source_container_name = self.get_resource_name("utcontainersource")
         self.byte_data = self.get_random_bytes(1024)
         if self.is_live:
             try:
@@ -83,7 +87,7 @@ async def _create_source_blob(self, data):
         await blob_client.upload_blob(data, overwrite=True)
         return blob_client

-    async def _create_blob(self, tags=None, data=b'', **kwargs):
+    async def _create_blob(self, tags=None, data=b"", **kwargs):
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         await blob.upload_blob(data, tags=tags, overwrite=True, **kwargs)
@@ -91,7 +95,7 @@ async def _setup_remote(self, storage_account_name, key):
         self.bsc2 = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=key)
-        self.remote_container_name = 'rmt'
+        self.remote_container_name = "rmt"

     def _teardown(self, file_path):
         if os.path.isfile(file_path):
@@ -109,8 +113,13 @@ def _get_blob_reference(self):
     async def _create_block_blob(self, overwrite=False, tags=None, standard_blob_tier=None):
         blob_name = self._get_blob_reference()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        await blob.upload_blob(self.byte_data, length=len(self.byte_data), overwrite=overwrite, tags=tags,
-                               standard_blob_tier=standard_blob_tier)
+        await blob.upload_blob(
+            self.byte_data,
+            length=len(self.byte_data),
+            overwrite=overwrite,
+            tags=tags,
+            standard_blob_tier=standard_blob_tier,
+        )
         return blob_name

     async def _create_empty_block_blob(self, overwrite=False, tags=None):
@@ -120,7 +129,7 @@ async def _create_empty_block_blob(self, overwrite=False, tags=None):
         return blob_name

     async def _create_remote_container(self):
-        self.remote_container_name = self.get_resource_name('remotectnr')
+        self.remote_container_name = self.get_resource_name("remotectnr")
         remote_container = self.bsc2.get_container_client(self.remote_container_name)
         try:
             await remote_container.create_container()
@@ -129,7 +138,7 @@ async def _create_remote_container(self):

     async def _create_remote_block_blob(self, blob_data=None):
         if not blob_data:
-            blob_data = b'12345678' * 1024 * 1024
+            blob_data = b"12345678" * 1024 * 1024
         source_blob_name = self._get_blob_reference()
         source_blob = self.bsc2.get_blob_client(self.remote_container_name, source_blob_name)
         await source_blob.upload_blob(blob_data, overwrite=True)
@@ -138,10 +147,10 @@ async def _create_remote_block_blob(self, blob_data=None):
     async def _wait_for_async_copy(self, blob):
         count = 0
         props = await blob.get_blob_properties()
-        while props.copy.status == 'pending':
+        while props.copy.status == "pending":
             count = count + 1
             if count > 10:
-                self.fail('Timed out waiting for async copy to complete.')
+                self.fail("Timed out waiting for async copy to complete.")
             self.sleep(6)
             props = await blob.get_blob_properties()
         return props
@@ -170,17 +179,21 @@ async def test_start_copy_from_url_with_oauth(self, **kwargs):
         source_blob_client = await self._create_source_blob(data=source_blob_data)
         # Create destination blob
         destination_blob_client = await self._create_blob()
-        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token("https://storage.azure.com/.default")
+        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token(
+            "https://storage.azure.com/.default"
+        )
         token = "Bearer {}".format(access_token.token)

         with pytest.raises(HttpResponseError):
             await destination_blob_client.start_copy_from_url(source_blob_client.url, requires_sync=True)

         with pytest.raises(ValueError):
             await destination_blob_client.start_copy_from_url(
-                source_blob_client.url, source_authorization=token, requires_sync=False)
+                source_blob_client.url, source_authorization=token, requires_sync=False
+            )

         await destination_blob_client.start_copy_from_url(
-            source_blob_client.url, source_authorization=token, requires_sync=True)
+            source_blob_client.url, source_authorization=token, requires_sync=True
+        )
         destination_blob = await destination_blob_client.download_blob()
         destination_blob_data = await destination_blob.readall()
         assert source_blob_data == destination_blob_data
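The OAuth copy test above authorizes the copy *source* with a bearer token rather than a SAS. A sketch under the same assumptions (the helper name and the aio DefaultAzureCredential are illustrative, not from this diff):

from azure.identity.aio import DefaultAzureCredential
from azure.storage.blob.aio import BlobClient

async def copy_with_oauth_source(source_url: str, destination: BlobClient) -> None:
    credential = DefaultAzureCredential()
    token = await credential.get_token("https://storage.azure.com/.default")
    # source_authorization is only honored for synchronous copies,
    # hence requires_sync=True (passing False raises ValueError).
    await destination.start_copy_from_url(
        source_url,
        source_authorization=f"Bearer {token.token}",
        requires_sync=True,
    )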
@@ -253,7 +266,7 @@ async def test_blob_snapshot_exists(self, **kwargs):

         # Assert
         assert prop
-        assert snapshot['snapshot'] == prop.snapshot
+        assert snapshot["snapshot"] == prop.snapshot

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -276,7 +289,7 @@ def data_generator():
         data = await dl_blob.readall()

         # Assert
-        assert data == raw_data*2
+        assert data == raw_data * 2

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -286,7 +299,7 @@ async def test_upload_blob_from_async_generator(self, **kwargs):

         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
-        data = b'Hello Async World!'
+        data = b"Hello Async World!"

         async def data_generator():
             for _ in range(3):
@@ -299,7 +312,7 @@ async def data_generator():

         # Assert
         result = await (await blob.download_blob()).readall()
-        assert result == data*3
+        assert result == data * 3

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -312,7 +325,7 @@ async def test_upload_blob_from_async_generator_chunks(self, **kwargs):
         self.bsc._config.max_block_size = 1024
         blob_name = self._get_blob_reference()
-        data = b'abc' * 1024
+        data = b"abc" * 1024

         async def data_generator():
             for _ in range(3):
@@ -325,7 +338,7 @@ async def data_generator():

         # Assert
         result = await (await blob.download_blob()).readall()
-        assert result == data*3
+        assert result == data * 3

     @pytest.mark.live_test_only
     @BlobPreparer()
@@ -338,7 +351,7 @@ async def test_upload_blob_from_async_generator_chunks_parallel(self, **kwargs):
         self.bsc._config.max_block_size = 1024
         blob_name = self._get_blob_reference()
-        data = b'abcde' * 1024
+        data = b"abcde" * 1024

         async def data_generator():
             for _ in range(3):
@@ -366,12 +379,12 @@ async def test_upload_blob_from_pipe(self, **kwargs):

         reader_fd, writer_fd = os.pipe()

-        with os.fdopen(writer_fd, 'wb') as writer:
+        with os.fdopen(writer_fd, "wb") as writer:
             writer.write(data)

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        with os.fdopen(reader_fd, mode='rb') as reader:
+        with os.fdopen(reader_fd, mode="rb") as reader:
             await blob.upload_blob(data=reader, overwrite=True)

         blob_data = await (await blob.download_blob()).readall()
@@ -484,15 +497,15 @@ async def test_create_blob_with_question_mark(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        blob_name = '?ques?tion?'
-        blob_data = '???'
+        blob_name = "?ques?tion?"
+        blob_data = "???"

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         await blob.upload_blob(blob_data)

         # Assert
-        stream = await blob.download_blob(encoding='utf-8')
+        stream = await blob.download_blob(encoding="utf-8")
         data = await stream.readall()
         assert data is not None
         assert data == blob_data
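The async-generator tests above feed upload_blob an async iterator. A condensed sketch of that pattern (the function name is illustrative):

from azure.storage.blob.aio import BlobClient

async def upload_from_async_generator(blob: BlobClient) -> None:
    data = b"Hello Async World!"

    async def data_generator():
        for _ in range(3):
            yield data

    # upload_blob consumes the async iterator; shrinking max_block_size on the
    # client (as the chunked tests do) forces a multi-block commit.
    await blob.upload_blob(data=data_generator(), overwrite=True)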
@@ -505,15 +518,15 @@ async def test_create_blob_with_equal_sign(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        blob_name = '=ques=tion!'
-        blob_data = '???'
+        blob_name = "=ques=tion!"
+        blob_data = "???"

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         await blob.upload_blob(blob_data)

         # Assert
-        stream = await blob.download_blob(encoding='utf-8')
+        stream = await blob.download_blob(encoding="utf-8")
         data = await stream.readall()
         assert data is not None
         assert data == blob_data
@@ -527,14 +540,14 @@ async def test_create_blob_with_special_chars(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)

         # Act
-        for c in '-._ /()$=\',~':
-            blob_name = '{0}a{0}a{0}'.format(c)
+        for c in "-._ /()$=',~":
+            blob_name = "{0}a{0}a{0}".format(c)
             blob_data = c
             blob = self.bsc.get_blob_client(self.container_name, blob_name)
             await blob.upload_blob(blob_data, length=len(blob_data))

             data = await (await blob.download_blob()).readall()
-            content = data.decode('utf-8')
+            content = data.decode("utf-8")
             assert content == blob_data
@@ -546,15 +559,15 @@ async def test_create_blob_and_download_blob_with_vid(self, **kwargs):

         # Arrange
         await self._setup(versioned_storage_account_name, versioned_storage_account_key)

         # Act
-        for c in '-._ /()$=\',~':
-            blob_name = '{0}a{0}a{0}'.format(c)
+        for c in "-._ /()$=',~":
+            blob_name = "{0}a{0}a{0}".format(c)
             blob_data = c
             blob = self.bsc.get_blob_client(self.container_name, blob_name)
             resp = await blob.upload_blob(blob_data, length=len(blob_data), overwrite=True)
-            assert resp.get('version_id') is not None
+            assert resp.get("version_id") is not None

-            data = await (await blob.download_blob(version_id=resp.get('version_id'))).readall()
-            content = data.decode('utf-8')
+            data = await (await blob.download_blob(version_id=resp.get("version_id"))).readall()
+            content = data.decode("utf-8")
             assert content == blob_data

         # Assert
@@ -569,14 +582,14 @@ async def test_create_blob_with_lease_id(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")

         # Act
-        data = b'hello world again'
+        data = b"hello world again"
         resp = await blob.upload_blob(data, length=len(data), lease=lease)

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         stream = await blob.download_blob(lease=lease)
         content = await stream.readall()
         assert content == data
@@ -590,15 +603,15 @@ async def test_create_blob_with_metadata(self, **kwargs):
         # Arrange
         await self._setup(storage_account_name, storage_account_key)
         blob_name = self._get_blob_reference()
-        metadata = {'hello': 'world', 'number': '42'}
+        metadata = {"hello": "world", "number": "42"}

         # Act
-        data = b'hello world'
+        data = b"hello world"
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         resp = await blob.upload_blob(data, length=len(data), metadata=metadata)

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         md = (await blob.get_blob_properties()).metadata
         assert md == metadata
@@ -609,8 +622,8 @@ async def test_upload_blob_with_dictionary(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         await self._setup(storage_account_name, storage_account_key)
-        blob_name = 'test_blob'
-        blob_data = {'hello': 'world'}
+        blob_name = "test_blob"
+        blob_data = {"hello": "world"}

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
@@ -632,11 +645,12 @@ def gen():
             yield "hello"
             yield "world!"
             yield " eom"
+
         blob = self.bsc.get_blob_client(self.container_name, "gen_blob")
         resp = await blob.upload_blob(data=gen())

         # Assert
-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None
         content = await (await blob.download_blob()).readall()
         assert content == b"helloworld! eom"
@@ -653,7 +667,7 @@ async def test_create_blob_with_requests(self, **kwargs):
         blob = self.bsc.get_blob_client(self.container_name, "msft")
         resp = await blob.upload_blob(data=data.raw, overwrite=True)

-        assert resp.get('etag') is not None
+        assert resp.get("etag") is not None

     @pytest.mark.live_test_only
     @BlobPreparer()
@@ -664,7 +678,7 @@ async def test_create_blob_with_aiohttp(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)

         # Create a blob to download with aiohttp using SAS
-        data = b'a' * 1024 * 1024
+        data = b"a" * 1024 * 1024
         blob = await self._create_blob(data=data)

         sas = self.generate_sas(
@@ -678,13 +692,13 @@ async def test_create_blob_with_aiohttp(self, **kwargs):
         )

         # Act
-        uri = blob.url + '?' + sas
+        uri = blob.url + "?" + sas
         async with aiohttp.ClientSession() as session:
             async with session.get(uri) as data:
                 async for text, _ in data.content.iter_chunks():
-                    blob2 = self.bsc.get_blob_client(self.container_name, blob.blob_name + '_copy')
+                    blob2 = self.bsc.get_blob_client(self.container_name, blob.blob_name + "_copy")
                     resp = await blob2.upload_blob(data=text, overwrite=True)
-                    assert resp.get('etag') is not None
+                    assert resp.get("etag") is not None

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -715,8 +729,7 @@ async def test_get_blob_with_snapshot(self, **kwargs):
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         snap = await blob.create_snapshot()
-        snapshot = self.bsc.get_blob_client(
-            self.container_name, blob_name, snapshot=snap)
+        snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=snap)

         # Act
         stream = await snapshot.download_blob()
@@ -736,10 +749,9 @@ async def test_get_blob_with_snapshot_previous(self, **kwargs):
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         snap = await blob.create_snapshot()
-        snapshot = self.bsc.get_blob_client(
-            self.container_name, blob_name, snapshot=snap)
+        snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=snap)

-        upload_data = b'hello world again'
+        upload_data = b"hello world again"
         await blob.upload_blob(upload_data, length=len(upload_data), overwrite=True)

         # Act
@@ -750,7 +762,7 @@ async def test_get_blob_with_snapshot_previous(self, **kwargs):

         # Assert
         assert blob_previous_bytes == self.byte_data
-        assert blob_latest_bytes == b'hello world again'
+        assert blob_latest_bytes == b"hello world again"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -780,7 +792,7 @@ async def test_get_blob_with_lease(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")

         # Act
         stream = await blob.download_blob(lease=lease)
@@ -820,15 +832,13 @@ async def test_set_blob_properties_with_existing_blob(self, **kwargs):

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         await blob.set_http_headers(
-            content_settings=ContentSettings(
-                content_language='spanish',
-                content_disposition='inline'),
+            content_settings=ContentSettings(content_language="spanish", content_disposition="inline"),
         )

         # Assert
         props = await blob.get_blob_properties()
-        assert props.content_settings.content_language == 'spanish'
-        assert props.content_settings.content_disposition == 'inline'
+        assert props.content_settings.content_language == "spanish"
+        assert props.content_settings.content_disposition == "inline"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -843,21 +853,19 @@ async def test_set_blob_properties_with_if_tags(self, **kwargs):

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         with pytest.raises(ResourceModifiedError):
-            await blob.set_http_headers(content_settings=ContentSettings(
-                content_language='spanish',
-                content_disposition='inline'),
-                if_tags_match_condition="\"tag1\"='first tag'")
+            await blob.set_http_headers(
+                content_settings=ContentSettings(content_language="spanish", content_disposition="inline"),
+                if_tags_match_condition="\"tag1\"='first tag'",
+            )
         await blob.set_http_headers(
-            content_settings=ContentSettings(
-                content_language='spanish',
-                content_disposition='inline'),
-            if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'"
+            content_settings=ContentSettings(content_language="spanish", content_disposition="inline"),
+            if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'",
         )

         # Assert
         props = await blob.get_blob_properties()
-        assert props.content_settings.content_language == 'spanish'
-        assert props.content_settings.content_disposition == 'inline'
+        assert props.content_settings.content_language == "spanish"
+        assert props.content_settings.content_disposition == "inline"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -872,14 +880,14 @@ async def test_set_blob_properties_with_blob_settings_param(self, **kwargs):
         props = await blob.get_blob_properties()

         # Act
-        props.content_settings.content_language = 'spanish'
-        props.content_settings.content_disposition = 'inline'
+        props.content_settings.content_language = "spanish"
+        props.content_settings.content_disposition = "inline"
         await blob.set_http_headers(content_settings=props.content_settings)

         # Assert
         props = await blob.get_blob_properties()
-        assert props.content_settings.content_language == 'spanish'
-        assert props.content_settings.content_disposition == 'inline'
+        assert props.content_settings.content_language == "spanish"
+        assert props.content_settings.content_disposition == "inline"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -899,7 +907,7 @@ async def test_get_blob_properties(self, **kwargs):
         assert isinstance(props, BlobProperties)
         assert props.blob_type == BlobType.BlockBlob
         assert props.size == len(self.byte_data)
-        assert props.lease.status == 'unlocked'
+        assert props.lease.status == "unlocked"
         assert props.creation_time is not None

     @BlobPreparer()
@@ -921,7 +929,7 @@ async def test_get_blob_properties_returns_rehydrate_priority(self, **kwargs):
         assert isinstance(props, BlobProperties)
         assert props.blob_type == BlobType.BlockBlob
         assert props.size == len(self.byte_data)
-        assert props.rehydrate_priority == 'High'
+        assert props.rehydrate_priority == "High"

     @BlobPreparer()
     @recorded_by_proxy_async
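Several of the hunks above make writes conditional on the blob's current tags via if_tags_match_condition. A sketch of the predicate syntax (the helper name is illustrative; tag keys go in double quotes, values in single quotes):

from azure.storage.blob.aio import BlobClient

async def conditional_metadata_update(blob: BlobClient) -> None:
    await blob.set_blob_tags({"tag1 name": "my tag", "tag2": "secondtag"})
    # The write goes through only if the predicate matches the blob's current
    # tags; otherwise the service rejects it and the SDK raises
    # ResourceModifiedError.
    await blob.set_blob_metadata(
        {"hello": "world"},
        if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'",
    )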
@@ -1026,8 +1034,8 @@ async def test_no_server_encryption(self, **kwargs):

         # Arrange
         self.bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
-        self.container_name = self.get_resource_name('utcontainer')
-        self.source_container_name = self.get_resource_name('utcontainersource')
+        self.container_name = self.get_resource_name("utcontainer")
+        self.source_container_name = self.get_resource_name("utcontainersource")
         self.byte_data = self.get_random_bytes(1024)
         await self.bsc.create_container(self.container_name)
         blob_name = await self._create_block_blob()
@@ -1035,7 +1043,7 @@ async def test_no_server_encryption(self, **kwargs):

         # Act
         def callback(response):
-            response.http_response.headers['x-ms-server-encrypted'] = 'false'
+            response.http_response.headers["x-ms-server-encrypted"] = "false"

         props = await blob.get_blob_properties(raw_response_hook=callback)
@@ -1055,7 +1063,7 @@ async def test_get_blob_properties_with_snapshot(self, **kwargs):
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         res = await blob.create_snapshot()
         blobs = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blobs.append(b)
         assert len(blobs) == 2
@@ -1079,7 +1087,7 @@ async def test_get_blob_properties_with_leased_blob(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")

         # Act
         props = await blob.get_blob_properties()
@@ -1088,9 +1096,9 @@ async def test_get_blob_properties_with_leased_blob(self, **kwargs):
         assert isinstance(props, BlobProperties)
         assert props.blob_type == BlobType.BlockBlob
         assert props.size == len(self.byte_data)
-        assert props.lease.status == 'locked'
-        assert props.lease.state == 'leased'
-        assert props.lease.duration == 'infinite'
+        assert props.lease.status == "locked"
+        assert props.lease.state == "leased"
+        assert props.lease.duration == "infinite"

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1117,7 +1125,7 @@ async def test_set_blob_metadata_with_upper_case(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
-        metadata = {'hello': ' world ', ' number ': '42', 'UP': 'UPval'}
+        metadata = {"hello": " world ", " number ": "42", "UP": "UPval"}
         blob_name = await self._create_block_blob()

         # Act
@@ -1127,10 +1135,10 @@ async def test_set_blob_metadata_with_upper_case(self, **kwargs):
         # Assert
         md = (await blob.get_blob_properties()).metadata
         assert 3 == len(md)
-        assert md['hello'] == 'world'
-        assert md['number'] == '42'
-        assert md['UP'] == 'UPval'
-        assert not 'up' in md
+        assert md["hello"] == "world"
+        assert md["number"] == "42"
+        assert md["UP"] == "UPval"
+        assert not "up" in md

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1141,22 +1149,24 @@ async def test_set_blob_metadata_with_if_tags(self, **kwargs):

         # Arrange
         await self._setup(storage_account_name, storage_account_key)
         tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"}
-        metadata = {'hello': ' world ', ' number ': '42', 'UP': 'UPval'}
+        metadata = {"hello": " world ", " number ": "42", "UP": "UPval"}
         blob_name = await self._create_block_blob(tags=tags, overwrite=True)

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         with pytest.raises(ResourceModifiedError):
             await blob.set_blob_metadata(metadata, if_tags_match_condition="\"tag1\"='first tag'")
-        await blob.set_blob_metadata(metadata, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'")
+        await blob.set_blob_metadata(
+            metadata, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'"
+        )

         # Assert
         md = (await blob.get_blob_properties()).metadata
         assert 3 == len(md)
-        assert md['hello'] == 'world'
-        assert md['number'] == '42'
-        assert md['UP'] == 'UPval'
-        assert not 'up' in md
+        assert md["hello"] == "world"
+        assert md["number"] == "42"
+        assert md["UP"] == "UPval"
+        assert not "up" in md

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1166,7 +1176,7 @@ async def test_set_blob_metadata_returns_vid(self, **kwargs):

         # Arrange
         await self._setup(versioned_storage_account_name, versioned_storage_account_key)
-        metadata = {'hello': 'world', 'number': '42', 'UP': 'UPval'}
+        metadata = {"hello": "world", "number": "42", "UP": "UPval"}
         blob_name = await self._create_block_blob()

         # Act
@@ -1174,13 +1184,13 @@ async def test_set_blob_metadata_returns_vid(self, **kwargs):
         resp = await blob.set_blob_metadata(metadata)

         # Assert
-        assert resp['version_id'] is not None
+        assert resp["version_id"] is not None
         md = (await blob.get_blob_properties()).metadata
         assert 3 == len(md)
-        assert md['hello'] == 'world'
-        assert md['number'] == '42'
-        assert md['UP'] == 'UPval'
-        assert not 'up' in md
+        assert md["hello"] == "world"
+        assert md["number"] == "42"
+        assert md["UP"] == "UPval"
+        assert not "up" in md

     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1216,7 +1226,11 @@ async def test_delete_blob_with_if_tags(self, **kwargs):

         with pytest.raises(ResourceModifiedError):
             await blob.delete_blob(if_tags_match_condition="\"tag1\"='first tag'")
-        resp = await blob.delete_blob(etag=prop.etag, match_condition=MatchConditions.IfNotModified, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'")
+        resp = await blob.delete_blob(
+            etag=prop.etag,
+            match_condition=MatchConditions.IfNotModified,
+            if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'",
+        )

         # Assert
         assert resp is None
@@ -1233,16 +1247,16 @@ async def test_delete_specific_blob_version(self, **kwargs):

         # Act
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        resp = await blob.upload_blob(b'abc', overwrite=True)
+        resp = await blob.upload_blob(b"abc", overwrite=True)

         # Assert
-        assert resp['version_id'] is not None
+        assert resp["version_id"] is not None

         # upload to override the previous version
-        await blob.upload_blob(b'abc', overwrite=True)
+        await blob.upload_blob(b"abc", overwrite=True)

         # Act
-        resp = await blob.delete_blob(version_id=resp['version_id'])
+        resp = await blob.delete_blob(version_id=resp["version_id"])
         blob_list = []
         async for blob in self.bsc.get_container_client(self.container_name).list_blobs(include="versions"):
             blob_list.append(blob)
@@ -1259,11 +1273,11 @@ async def test_delete_blob_version_with_blob_sas(self, **kwargs):
         await self._setup(versioned_storage_account_name, versioned_storage_account_key)
         blob_name = await self._create_block_blob()
         blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
-        resp = await blob_client.upload_blob(b'abcde', overwrite=True)
+        resp = await blob_client.upload_blob(b"abcde", overwrite=True)

-        version_id = resp['version_id']
+        version_id = resp["version_id"]
         assert version_id is not None
-        await blob_client.upload_blob(b'abc', overwrite=True)
+        await blob_client.upload_blob(b"abc", overwrite=True)

         token = self.generate_sas(
             generate_blob_sas,
@@ -1313,8 +1327,7 @@ async def test_delete_blob_snapshot(self, **kwargs):
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         snap = await blob.create_snapshot()
-        snapshot = self.bsc.get_blob_client(
-            self.container_name, blob_name, snapshot=snap)
+        snapshot = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=snap)

         # Act
         await snapshot.delete_blob()
@@ -1322,7 +1335,7 @@ async def test_delete_blob_snapshot(self, **kwargs):
         # Assert
         container = self.bsc.get_container_client(self.container_name)
         blobs = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blobs.append(b)
         assert len(blobs) == 1
         assert blobs[0].name == blob_name
@@ -1341,12 +1354,12 @@ async def test_delete_blob_snapshots(self, **kwargs):
         await blob.create_snapshot()

         # Act
-        await blob.delete_blob(delete_snapshots='only')
+        await blob.delete_blob(delete_snapshots="only")

         # Assert
         container = self.bsc.get_container_client(self.container_name)
         blobs = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blobs.append(b)
         assert len(blobs) == 1
         assert blobs[0].snapshot is None
@@ -1365,20 +1378,20 @@ async def test_create_blob_snapshot_returns_vid(self, **kwargs):
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
         resp = await blob.create_snapshot()
         blobs = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blobs.append(b)

         # Assert
-        assert resp['version_id'] is not None
+        assert resp["version_id"] is not None
         # Both create blob and create snapshot will create a new version
         assert len(blobs) >= 2

         # Act
-        await blob.delete_blob(delete_snapshots='only')
+        await blob.delete_blob(delete_snapshots="only")

         # Assert
         blobs = []
-        async for b in container.list_blobs(include=['snapshots', 'versions']):
+        async for b in container.list_blobs(include=["snapshots", "versions"]):
             blobs.append(b)
         assert len(blobs) > 0
         assert blobs[0].snapshot is None
@@ -1399,12 +1412,12 @@ async def test_delete_blob_with_snapshots(self, **kwargs):
         # with pytest.raises(HttpResponseError):
         #    blob.delete_blob()

-        await blob.delete_blob(delete_snapshots='include')
+        await blob.delete_blob(delete_snapshots="include")

         # Assert
         container = self.bsc.get_container_client(self.container_name)
         blobs = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blobs.append(b)
         assert len(blobs) == 0
@@ -1424,14 +1437,13 @@ async def test_soft_delete_blob_without_snapshots(self, **kwargs):
         # Soft delete the blob
         await blob.delete_blob()
         blob_list = []
-        async for b in container.list_blobs(include='deleted'):
+        async for b in container.list_blobs(include="deleted"):
             blob_list.append(b)

         # Assert
         assert len(blob_list) == 1
         self._assert_blob_is_soft_deleted(blob_list[0])

-
         # list_blobs should not list soft deleted blobs if Include(deleted=True) is not specified
         blob_list = []
         async for b in container.list_blobs():
@@ -1443,7 +1455,7 @@ async def test_soft_delete_blob_without_snapshots(self, **kwargs):
         # Restore blob with undelete
         await blob.undelete_blob()
         blob_list = []
-        async for b in container.list_blobs(include='deleted'):
+        async for b in container.list_blobs(include="deleted"):
             blob_list.append(b)

         # Assert
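The snapshot-deletion tests above exercise the delete_snapshots modes. A minimal sketch (the function name is illustrative):

from azure.storage.blob.aio import BlobClient

async def remove_snapshots(blob: BlobClient) -> None:
    await blob.create_snapshot()
    # "only" deletes the snapshots and keeps the base blob;
    # "include" deletes the base blob together with every snapshot.
    await blob.delete_blob(delete_snapshots="only")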
@@ -1464,12 +1476,11 @@ async def test_soft_delete_single_blob_snapshot(self, **kwargs):
         blob_snapshot_2 = await blob.create_snapshot()

         # Soft delete blob_snapshot_1
-        snapshot_1 = self.bsc.get_blob_client(
-            self.container_name, blob_name, snapshot=blob_snapshot_1)
+        snapshot_1 = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob_snapshot_1)
         await snapshot_1.delete_blob()

         with pytest.raises(ValueError):
-            await snapshot_1.delete_blob(delete_snapshots='only')
+            await snapshot_1.delete_blob(delete_snapshots="only")

         container = self.bsc.get_container_client(self.container_name)
         blob_list = []
@@ -1479,14 +1490,14 @@ async def test_soft_delete_single_blob_snapshot(self, **kwargs):
         # Assert
         assert len(blob_list) == 3
         for listedblob in blob_list:
-            if listedblob.snapshot == blob_snapshot_1['snapshot']:
+            if listedblob.snapshot == blob_snapshot_1["snapshot"]:
                 self._assert_blob_is_soft_deleted(listedblob)
             else:
                 self._assert_blob_not_soft_deleted(listedblob)

         # list_blobs should not list soft deleted blob snapshots if Include(deleted=True) is not specified
         blob_list = []
-        async for b in container.list_blobs(include='snapshots'):
+        async for b in container.list_blobs(include="snapshots"):
             blob_list.append(b)

         # Assert
@@ -1517,7 +1528,7 @@ async def test_soft_delete_only_snapshots_of_blob(self, **kwargs):
         blob_snapshot_2 = await blob.create_snapshot()

         # Soft delete all snapshots
-        await blob.delete_blob(delete_snapshots='only')
+        await blob.delete_blob(delete_snapshots="only")
         container = self.bsc.get_container_client(self.container_name)
         blob_list = []
         async for b in container.list_blobs(include=["snapshots", "deleted"]):
@@ -1526,9 +1537,9 @@ async def test_soft_delete_only_snapshots_of_blob(self, **kwargs):
         # Assert
         assert len(blob_list) == 3
         for listedblob in blob_list:
-            if listedblob.snapshot == blob_snapshot_1['snapshot']:
+            if listedblob.snapshot == blob_snapshot_1["snapshot"]:
                 self._assert_blob_is_soft_deleted(listedblob)
-            elif listedblob.snapshot == blob_snapshot_2['snapshot']:
+            elif listedblob.snapshot == blob_snapshot_2["snapshot"]:
                 self._assert_blob_is_soft_deleted(listedblob)
             else:
                 self._assert_blob_not_soft_deleted(listedblob)
@@ -1566,7 +1577,7 @@ async def test_soft_delete_blob_including_all_snapshots(self, **kwargs):
         blob_snapshot_2 = await blob.create_snapshot()

         # Soft delete blob and all snapshots
-        await blob.delete_blob(delete_snapshots='include')
+        await blob.delete_blob(delete_snapshots="include")
         container = self.bsc.get_container_client(self.container_name)
         blob_list = []
         async for b in container.list_blobs(include=["snapshots", "deleted"]):
@@ -1606,7 +1617,7 @@ async def test_soft_delete_with_leased_blob(self, **kwargs):
         await self._setup(storage_account_name, storage_account_key)
         blob_name = await self._create_block_blob()
         blob = self.bsc.get_blob_client(self.container_name, blob_name)
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")

         # Soft delete the blob without lease_id should fail
         with pytest.raises(HttpResponseError):
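The soft-delete tests above depend on delete retention being enabled on the account. A sketch of the delete, list, and undelete round trip (the function name is illustrative):

from azure.storage.blob.aio import BlobClient, ContainerClient

async def soft_delete_roundtrip(blob: BlobClient, container: ContainerClient) -> None:
    await blob.delete_blob()  # soft-deleted when delete retention is enabled
    async for item in container.list_blobs(include="deleted"):
        print(item.name, item.deleted)  # soft-deleted blobs appear only here
    await blob.undelete_blob()  # restore within the retention window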
len(tags) with pytest.raises(ResourceModifiedError): - await copyblob.start_copy_from_url(sourceblob, tags=tags, source_if_tags_match_condition="\"source\"='sourcetag'") - await copyblob.start_copy_from_url(sourceblob, tags=tags, source_if_tags_match_condition="\"source\"='source tag'") + await copyblob.start_copy_from_url( + sourceblob, tags=tags, source_if_tags_match_condition="\"source\"='sourcetag'" + ) + await copyblob.start_copy_from_url( + sourceblob, tags=tags, source_if_tags_match_condition="\"source\"='source tag'" + ) with pytest.raises(ResourceModifiedError): - await copyblob.start_copy_from_url(sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='abc'") - copy = await copyblob.start_copy_from_url(sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='first tag'") + await copyblob.start_copy_from_url( + sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='abc'" + ) + copy = await copyblob.start_copy_from_url( + sourceblob, tags={"tag1": "abc"}, if_tags_match_condition="\"tag1\"='first tag'" + ) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None with pytest.raises(ResourceModifiedError): await (await copyblob.download_blob(if_tags_match_condition="\"tag1\"='abc1'")).readall() @@ -1703,18 +1723,19 @@ async def test_copy_blob_returns_vid(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = await copyblob.start_copy_from_url(sourceblob) # Assert assert copy is not None - assert copy['version_id'] is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["version_id"] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = await (await copyblob.download_blob()).readall() assert copy_content == self.byte_data @@ -1731,17 +1752,18 @@ async def test_copy_blob_with_existing_blob(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') + copyblob = self.bsc.get_blob_client(self.container_name, "blob1copy") copy = await copyblob.start_copy_from_url(sourceblob) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) + assert copy["copy_id"] is not None copy_content = await (await copyblob.download_blob()).readall() assert copy_content == 
self.byte_data @@ -1756,43 +1778,49 @@ async def test_copy_blob_with_immutability_policy(self, **kwargs): await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) blob_name = await self._create_block_blob() # Act - sourceblob = '{0}/{1}/{2}'.format( - self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name) + sourceblob = "{0}/{1}/{2}".format( + self.account_url(versioned_storage_account_name, "blob"), self.container_name, blob_name + ) - copyblob = self.bsc.get_blob_client(container_name, 'blob1copy') - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) + copyblob = self.bsc.get_blob_client(container_name, "blob1copy") + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) - copy = await copyblob.start_copy_from_url(sourceblob, immutability_policy=immutability_policy, - legal_hold=True) + copy = await copyblob.start_copy_from_url(sourceblob, immutability_policy=immutability_policy, legal_hold=True) download_resp = await copyblob.download_blob() assert await download_resp.readall() == self.byte_data - assert download_resp.properties['has_legal_hold'] - assert download_resp.properties['immutability_policy']['expiry_time'] is not None - assert download_resp.properties['immutability_policy']['policy_mode'] is not None + assert download_resp.properties["has_legal_hold"] + assert download_resp.properties["immutability_policy"]["expiry_time"] is not None + assert download_resp.properties["immutability_policy"]["policy_mode"] is not None assert copy is not None - assert copy['copy_status'] == 'success' - assert not isinstance(copy['copy_status'], Enum) + assert copy["copy_status"] == "success" + assert not isinstance(copy["copy_status"], Enum) if self.is_live: await copyblob.delete_immutability_policy() await copyblob.set_legal_hold(False) await copyblob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -1811,7 +1839,7 @@ async def test_copy_blob_async_private_blob_no_sas(self, **kwargs): 
source_blob = await self._create_remote_block_blob() # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) # Assert @@ -1828,7 +1856,7 @@ async def test_copy_blob_async_private_blob_with_sas(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = b'12345678' * 1024 * 1024 + data = b"12345678" * 1024 * 1024 await self._setup_remote(secondary_storage_account_name, secondary_storage_account_key) await self._create_remote_container() source_blob = await self._create_remote_block_blob(blob_data=data) @@ -1845,13 +1873,13 @@ async def test_copy_blob_async_private_blob_with_sas(self, **kwargs): blob = BlobClient.from_blob_url(source_blob.url, credential=sas_token) # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) copy_resp = await target_blob.start_copy_from_url(blob.url) # Assert props = await self._wait_for_async_copy(target_blob) - assert props.copy.status == 'success' + assert props.copy.status == "success" actual_data = await (await target_blob.download_blob()).readall() assert actual_data == data @@ -1864,22 +1892,22 @@ async def test_abort_copy_blob(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) source_blob = "https://www.gutenberg.org/files/59466/59466-0.txt" - copied_blob = self.bsc.get_blob_client(self.container_name, '59466-0.txt') + copied_blob = self.bsc.get_blob_client(self.container_name, "59466-0.txt") # Act copy = await copied_blob.start_copy_from_url(source_blob) - assert copy['copy_status'] == 'pending' + assert copy["copy_status"] == "pending" try: await copied_blob.abort_copy(copy) props = await self._wait_for_async_copy(copied_blob) - assert props.copy.status == 'aborted' + assert props.copy.status == "aborted" # Assert actual_data = await copied_blob.download_blob() bytes_data = await (await copied_blob.download_blob()).readall() assert bytes_data == b"" - assert actual_data.properties.copy.status == 'aborted' + assert actual_data.properties.copy.status == "aborted" # In the Live test pipeline, the copy occasionally finishes before it can be aborted. # Catch and assert on error code to prevent this test from failing. 
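# The comment above describes a race in live runs: the service can finish the copy
# before abort_copy() is processed, and the abort is then rejected with
# NoPendingCopyOperation. A minimal, hypothetical sketch of the guard that comment
# describes (the helper name and exact assertion are assumptions, not the test's
# actual code; StorageErrorCode is the enum exported by azure.storage.blob):
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import StorageErrorCode

async def _abort_copy_tolerating_fast_copy(copied_blob, copy):
    try:
        # copy is the dict returned by start_copy_from_url(); abort_copy accepts it.
        await copied_blob.abort_copy(copy)
    except HttpResponseError as e:
        # The copy already completed; only this specific error code is acceptable.
        assert e.error_code == StorageErrorCode.no_pending_copy_operation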
@@ -1898,7 +1926,7 @@ async def test_abort_copy_blob_with_synchronous_copy_fails(self, **kwargs): source_blob = self.bsc.get_blob_client(self.container_name, source_blob_name) # Act - target_blob_name = 'targetblob' + target_blob_name = "targetblob" target_blob = self.bsc.get_blob_client(self.container_name, target_blob_name) copy_resp = await target_blob.start_copy_from_url(source_blob.url) @@ -1906,7 +1934,7 @@ async def test_abort_copy_blob_with_synchronous_copy_fails(self, **kwargs): await target_blob.abort_copy(copy_resp) # Assert - assert copy_resp['copy_status'] == 'success' + assert copy_resp["copy_status"] == "success" @BlobPreparer() @recorded_by_proxy_async @@ -1924,7 +1952,7 @@ async def test_snapshot_blob(self, **kwargs): # Assert assert resp is not None - assert resp['snapshot'] is not None + assert resp["snapshot"] is not None @BlobPreparer() @recorded_by_proxy_async @@ -1938,9 +1966,9 @@ async def test_lease_blob_acquire_and_release(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") await lease.release() - lease2 = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease2 = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Assert assert lease is not None @@ -1958,13 +1986,13 @@ async def test_lease_blob_with_duration(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) - resp = await blob.upload_blob(b'hello 2', length=7, lease=lease) + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) + resp = await blob.upload_blob(b"hello 2", length=7, lease=lease) self.sleep(20) # Assert with pytest.raises(HttpResponseError): - await blob.upload_blob(b'hello 3', length=7, lease=lease) + await blob.upload_blob(b"hello 3", length=7, lease=lease) @BlobPreparer() @recorded_by_proxy_async @@ -1978,7 +2006,7 @@ async def test_lease_blob_with_proposed_lease_id(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0' + lease_id = "a0e6c241-96ea-45a3-a44b-6ae868bc14d0" lease = await blob.acquire_lease(lease_id=lease_id) # Assert @@ -1996,8 +2024,8 @@ async def test_lease_blob_change_lease_id(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0' - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = "a0e6c241-96ea-45a3-a44b-6ae868bc14d0" + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") first_lease_id = lease.id await lease.change(lease_id) await lease.renew() @@ -2018,19 +2046,19 @@ async def test_lease_blob_break_period(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) lease_time = await lease.break_lease(lease_break_period=5) - resp = await blob.upload_blob(b'hello 2', length=7, lease=lease) + resp = await blob.upload_blob(b"hello 2", length=7, lease=lease) self.sleep(5) with 
pytest.raises(HttpResponseError): - await blob.upload_blob(b'hello 3', length=7, lease=lease) + await blob.upload_blob(b"hello 3", length=7, lease=lease) # Assert assert lease.id is not None assert lease_time is not None - assert resp.get('etag') is not None + assert resp.get("etag") is not None @BlobPreparer() @recorded_by_proxy_async @@ -2044,7 +2072,7 @@ async def test_lease_blob_acquire_and_renew(self, **kwargs): # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") first_id = lease.id await lease.renew() @@ -2061,11 +2089,11 @@ async def test_lease_blob_acquire_twice_fails(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = await self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act with pytest.raises(HttpResponseError): - await blob.acquire_lease(lease_id='00000000-1111-2222-3333-555555555555') + await blob.acquire_lease(lease_id="00000000-1111-2222-3333-555555555555") # Assert assert lease.id is not None @@ -2078,16 +2106,16 @@ async def test_unicode_get_blob_unicode_name(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - blob_name = '啊齄丂狛狜' + blob_name = "啊齄丂狛狜" blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(b'hello world') + await blob.upload_blob(b"hello world") # Act stream = await blob.download_blob() content = await stream.readall() # Assert - assert content == b'hello world' + assert content == b"hello world" @BlobPreparer() @recorded_by_proxy_async @@ -2101,11 +2129,11 @@ async def test_create_blob_blob_unicode_data(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - data = u'hello world啊齄丂狛狜' + data = "hello world啊齄丂狛狜" resp = await blob.upload_blob(data) # Assert - assert resp.get('etag') is not None + assert resp.get("etag") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2118,9 +2146,18 @@ async def test_sas_access_blob(self, **kwargs): blob_name = await self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - permission = BlobSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - permanent_delete=True, list=True, add=True, create=True, update=True) - assert 'y' in str(permission) + permission = BlobSasPermissions( + read=True, + write=True, + delete=True, + delete_previous_version=True, + permanent_delete=True, + list=True, + add=True, + create=True, + update=True, + ) + assert "y" in str(permission) token = self.generate_sas( generate_blob_sas, @@ -2153,14 +2190,14 @@ async def test_sas_signed_identifier(self, **kwargs): container = self.bsc.get_container_client(self.container_name) blob = self.bsc.get_blob_client(self.container_name, blob_name) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow() - timedelta(hours=1)) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + start = self.get_datetime_variable(variables, "start", datetime.utcnow() - timedelta(hours=1)) + expiry = self.get_datetime_variable(variables, "expiry", datetime.utcnow() + timedelta(hours=1)) access_policy = 
AccessPolicy() access_policy.start = start access_policy.expiry = expiry access_policy.permission = BlobSasPermissions(read=True) - identifiers = {'testid': access_policy} + identifiers = {"testid": access_policy} await container.set_container_access_policy(identifiers) @@ -2171,7 +2208,8 @@ async def test_sas_signed_identifier(self, **kwargs): blob.blob_name, snapshot=blob.snapshot, account_key=blob.credential.account_key, - policy_id='testid') + policy_id="testid", + ) # Act service = BlobClient.from_blob_url(blob.url, credential=token) @@ -2192,9 +2230,10 @@ async def test_account_sas(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = await self._create_block_blob() - account_sas_permission = AccountSasPermissions(read=True, write=True, delete=True, add=True, - permanent_delete=True, list=True) - assert 'y' in str(account_sas_permission) + account_sas_permission = AccountSasPermissions( + read=True, write=True, delete=True, add=True, permanent_delete=True, list=True + ) + assert "y" in str(account_sas_permission) token = self.generate_sas( generate_account_sas, @@ -2206,10 +2245,8 @@ async def test_account_sas(self, **kwargs): ) # Act - blob = BlobClient( - self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=token) - container = ContainerClient( - self.bsc.url, container_name=self.container_name, credential=token) + blob = BlobClient(self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=token) + container = ContainerClient(self.bsc.url, container_name=self.container_name, credential=token) container_props = await container.get_container_properties() blob_props = await blob.get_blob_properties() @@ -2238,9 +2275,11 @@ async def test_account_sas_credential(self, **kwargs): # Act blob = BlobClient( - self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=AzureSasCredential(token)) + self.bsc.url, container_name=self.container_name, blob_name=blob_name, credential=AzureSasCredential(token) + ) container = ContainerClient( - self.bsc.url, container_name=self.container_name, credential=AzureSasCredential(token)) + self.bsc.url, container_name=self.container_name, credential=AzureSasCredential(token) + ) blob_properties = await blob.get_blob_properties() container_properties = await container.get_container_properties() @@ -2261,11 +2300,11 @@ async def test_multiple_services_sas(self, **kwargs): ResourceTypes(container=True, object=True, service=True), AccountSasPermissions(read=True, list=True), datetime.utcnow() + timedelta(hours=1), - services=Services(blob=True, fileshare=True) + services=Services(blob=True, fileshare=True), ) # Assert - assert 'ss=bf' in token + assert "ss=bf" in token @BlobPreparer() @recorded_by_proxy_async @@ -2317,7 +2356,7 @@ async def test_token_credential_blob(self, **kwargs): # Setup container_name = self._get_container_reference() blob_name = self._get_blob_reference() - blob_data = b'Helloworld' + blob_data = b"Helloworld" token_credential = self.get_credential(BlobServiceClient, is_async=True) service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) @@ -2344,13 +2383,15 @@ async def test_token_credential_with_batch_operation(self, **kwargs): container_name = self._get_container_reference() blob_name = self._get_blob_reference() token_credential = self.get_credential(BlobServiceClient, is_async=True) - async with BlobServiceClient(self.account_url(storage_account_name, "blob"), 
credential=token_credential) as service: + async with BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=token_credential + ) as service: container = service.get_container_client(container_name) try: await container.create_container() - await container.upload_blob(blob_name + '1', b'HelloWorld') - await container.upload_blob(blob_name + '2', b'HelloWorld') - await container.upload_blob(blob_name + '3', b'HelloWorld') + await container.upload_blob(blob_name + "1", b"HelloWorld") + await container.upload_blob(blob_name + "2", b"HelloWorld") + await container.upload_blob(blob_name + "3", b"HelloWorld") delete_batch = [] blob_list = container.list_blobs(name_starts_with=blob_name) @@ -2412,11 +2453,11 @@ async def test_shared_read_access_blob_with_content_query_params(self, **kwargs) account_key=blob.credential.account_key, permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), - cache_control='no-cache', - content_disposition='inline', - content_encoding='utf-8', - content_language='fr', - content_type='text', + cache_control="no-cache", + content_disposition="inline", + content_encoding="utf-8", + content_language="fr", + content_type="text", ) sas_blob = BlobClient.from_blob_url(blob.url, credential=token) @@ -2426,11 +2467,11 @@ async def test_shared_read_access_blob_with_content_query_params(self, **kwargs) # Assert response.raise_for_status() assert self.byte_data == response.content - assert response.headers['cache-control'] == 'no-cache' - assert response.headers['content-disposition'] == 'inline' - assert response.headers['content-encoding'] == 'utf-8' - assert response.headers['content-language'] == 'fr' - assert response.headers['content-type'] == 'text' + assert response.headers["cache-control"] == "no-cache" + assert response.headers["content-disposition"] == "inline" + assert response.headers["content-encoding"] == "utf-8" + assert response.headers["content-language"] == "fr" + assert response.headers["content-type"] == "text" @pytest.mark.live_test_only @BlobPreparer() @@ -2440,7 +2481,7 @@ async def test_shared_write_access_blob(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - updated_data = b'updated blob data' + updated_data = b"updated blob data" blob_name = await self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -2457,7 +2498,7 @@ async def test_shared_write_access_blob(self, **kwargs): sas_blob = BlobClient.from_blob_url(blob.url, credential=token) # Act - headers = {'x-ms-blob-type': 'BlockBlob'} + headers = {"x-ms-blob-type": "BlockBlob"} response = requests.put(sas_blob.url, headers=headers, data=updated_data) # Assert @@ -2513,15 +2554,15 @@ async def test_get_account_information(self, **kwargs): bc_info = await blob_client.get_account_information() # Assert - assert bsc_info.get('sku_name') is not None - assert bsc_info.get('account_kind') is not None - assert not bsc_info.get('is_hns_enabled') - assert cc_info.get('sku_name') is not None - assert cc_info.get('account_kind') is not None - assert not cc_info.get('is_hns_enabled') - assert bc_info.get('sku_name') is not None - assert bc_info.get('account_kind') is not None - assert not bc_info.get('is_hns_enabled') + assert bsc_info.get("sku_name") is not None + assert bsc_info.get("account_kind") is not None + assert not bsc_info.get("is_hns_enabled") + assert cc_info.get("sku_name") is not None + assert cc_info.get("account_kind") is not None + assert not 
cc_info.get("is_hns_enabled") + assert bc_info.get("sku_name") is not None + assert bc_info.get("account_kind") is not None + assert not bc_info.get("is_hns_enabled") @BlobPreparer() @recorded_by_proxy_async @@ -2559,32 +2600,30 @@ async def test_get_account_information_sas(self, **kwargs): ) # Act - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=account_token) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=account_token) bsc_info = await bsc.get_account_information() container_client = ContainerClient( - self.account_url(storage_account_name, "blob"), - self.container_name, - credential=container_token) + self.account_url(storage_account_name, "blob"), self.container_name, credential=container_token + ) cc_info = await container_client.get_account_information() blob_client = BlobClient( self.account_url(storage_account_name, "blob"), self.container_name, self._get_blob_reference(), - credential=blob_token) + credential=blob_token, + ) bc_info = await blob_client.get_account_information() # Assert - assert bsc_info.get('sku_name') is not None - assert bsc_info.get('account_kind') is not None - assert not bsc_info.get('is_hns_enabled') - assert cc_info.get('sku_name') is not None - assert cc_info.get('account_kind') is not None - assert not cc_info.get('is_hns_enabled') - assert bc_info.get('sku_name') is not None - assert bc_info.get('account_kind') is not None - assert not bc_info.get('is_hns_enabled') + assert bsc_info.get("sku_name") is not None + assert bsc_info.get("account_kind") is not None + assert not bsc_info.get("is_hns_enabled") + assert cc_info.get("sku_name") is not None + assert cc_info.get("account_kind") is not None + assert not cc_info.get("is_hns_enabled") + assert bc_info.get("sku_name") is not None + assert bc_info.get("account_kind") is not None + assert not bc_info.get("is_hns_enabled") @BlobPreparer() @recorded_by_proxy_async @@ -2599,8 +2638,8 @@ async def test_get_account_information_with_container_name(self, **kwargs): info = await container.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @BlobPreparer() @recorded_by_proxy_async @@ -2615,8 +2654,8 @@ async def test_get_account_information_with_blob_name(self, **kwargs): info = await blob.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2627,10 +2666,17 @@ async def test_get_account_information_with_container_sas(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) container = self.bsc.get_container_client(self.container_name) - permission = ContainerSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - list=True, tag=True, set_immutability_policy=True, - permanent_delete=True) - assert 'y' in str(permission) + permission = ContainerSasPermissions( + read=True, + write=True, + delete=True, + delete_previous_version=True, + list=True, + tag=True, + set_immutability_policy=True, + permanent_delete=True, + ) + assert "y" in str(permission) token = self.generate_sas( generate_container_sas, container.account_name, @@ -2645,8 +2691,8 @@ async def test_get_account_information_with_container_sas(self, **kwargs): 
info = await sas_container.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2675,8 +2721,8 @@ async def test_get_account_information_with_blob_sas(self, **kwargs): info = await sas_blob.get_account_information() # Assert - assert info.get('sku_name') is not None - assert info.get('account_kind') is not None + assert info.get("sku_name") is not None + assert info.get("account_kind") is not None @pytest.mark.live_test_only @BlobPreparer() @@ -2685,7 +2731,7 @@ async def test_download_to_file_with_sas(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = await self._create_blob(data=data) sas_token = self.generate_sas( @@ -2715,7 +2761,7 @@ async def test_download_to_file_with_credential(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = await self._create_blob(data=data) # Act @@ -2732,7 +2778,7 @@ async def test_download_to_stream_with_credential(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = await self._create_blob(data=data) # Act @@ -2750,12 +2796,14 @@ async def test_download_to_file_with_existing_file(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = await self._create_blob(data=data) # Act with tempfile.NamedTemporaryFile(delete=False) as temp_file: - await download_blob_from_url(source_blob.url, temp_file.name, credential=storage_account_key, overwrite=True) + await download_blob_from_url( + source_blob.url, temp_file.name, credential=storage_account_key, overwrite=True + ) with pytest.raises(ValueError): await download_blob_from_url(source_blob.url, temp_file.name) @@ -2775,23 +2823,19 @@ async def test_download_to_file_with_existing_file_overwrite(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 source_blob = await self._create_blob(data=data) - file_path = 'file_with_existing_file_overwrite.temp.{}.dat'.format(str(uuid.uuid4())) + file_path = "file_with_existing_file_overwrite.temp.{}.dat".format(str(uuid.uuid4())) # Act - await download_blob_from_url( - source_blob.url, file_path, - credential=storage_account_key) + await download_blob_from_url(source_blob.url, file_path, credential=storage_account_key) - data2 = b'ABC' * 1024 + data2 = b"ABC" * 1024 source_blob = await self._create_blob(data=data2) - await download_blob_from_url( - source_blob.url, file_path, overwrite=True, - credential=storage_account_key) + await download_blob_from_url(source_blob.url, file_path, overwrite=True, credential=storage_account_key) # Assert - with open(file_path, 'rb') as stream: + with open(file_path, "rb") as stream: actual = stream.read() assert data2 == actual self._teardown(file_path) @@ -2804,7 +2848,7 @@ async def test_upload_to_url_bytes_with_sas(self, **kwargs): # Arrange await 
self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -2836,13 +2880,12 @@ async def test_upload_to_url_bytes_with_credential(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - uploaded = await upload_blob_to_url( - blob.url, data, credential=storage_account_key) + uploaded = await upload_blob_to_url(blob.url, data, credential=storage_account_key) # Assert assert uploaded is not None @@ -2857,15 +2900,14 @@ async def test_upload_to_url_bytes_with_existing_blob(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(b"existing_data") # Act with pytest.raises(ResourceExistsError): - await upload_blob_to_url( - blob.url, data, credential=storage_account_key) + await upload_blob_to_url(blob.url, data, credential=storage_account_key) # Assert content = await (await blob.download_blob()).readall() @@ -2879,16 +2921,13 @@ async def test_upload_to_url_bytes_with_existing_blob_overwrite(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(b"existing_data") # Act - uploaded = await upload_blob_to_url( - blob.url, data, - overwrite=True, - credential=storage_account_key) + uploaded = await upload_blob_to_url(blob.url, data, overwrite=True, credential=storage_account_key) # Assert assert uploaded is not None @@ -2903,17 +2942,16 @@ async def test_upload_to_url_text_with_credential(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = '123' * 1024 + data = "123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act - uploaded = await upload_blob_to_url( - blob.url, data, credential=storage_account_key) + uploaded = await upload_blob_to_url(blob.url, data, credential=storage_account_key) # Assert assert uploaded is not None - stream = await blob.download_blob(encoding='UTF-8') + stream = await blob.download_blob(encoding="UTF-8") content = await stream.readall() assert data == content @@ -2925,7 +2963,7 @@ async def test_upload_to_url_file_with_credential(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - data = b'123' * 1024 + data = b"123" * 1024 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -2946,9 +2984,11 @@ async def test_transport_closed_only_once(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - container_name = self.get_resource_name('utcontainerasync') + container_name = self.get_resource_name("utcontainerasync") transport = AioHttpTransport() - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, transport=transport) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), 
credential=storage_account_key, transport=transport + ) blob_name = self._get_blob_reference() async with bsc: await bsc.get_service_properties() @@ -2968,46 +3008,51 @@ async def test_blob_immutability_policy(self, **kwargs): await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = self.bsc.get_blob_client(container_name, blob_name) await blob.upload_blob(b"abc", overwrite=True) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - resp = await blob.set_immutability_policy( - immutability_policy=immutability_policy) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + resp = await blob.set_immutability_policy(immutability_policy=immutability_policy) # Assert # check immutability policy after set_immutability_policy() props = await blob.get_blob_properties() - assert resp['immutability_policy_until_date'] is not None - assert resp['immutability_policy_mode'] is not None - assert props['immutability_policy']['expiry_time'] is not None - assert props['immutability_policy']['policy_mode'] is not None - assert props['immutability_policy']['policy_mode'] == "unlocked" + assert resp["immutability_policy_until_date"] is not None + assert resp["immutability_policy_mode"] is not None + assert props["immutability_policy"]["expiry_time"] is not None + assert props["immutability_policy"]["policy_mode"] is not None + assert props["immutability_policy"]["policy_mode"] == "unlocked" # check immutability policy after delete_immutability_policy() await blob.delete_immutability_policy() props = await blob.get_blob_properties() - assert props['immutability_policy']['policy_mode'] is None - assert props['immutability_policy']['policy_mode'] is None + assert props["immutability_policy"]["policy_mode"] is None + assert props["immutability_policy"]["policy_mode"] is None if self.is_live: await blob.delete_immutability_policy() await blob.set_legal_hold(False) await blob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + 
storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -3020,17 +3065,20 @@ async def test_blob_legal_hold(self, **kwargs): await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = self.bsc.get_blob_client(container_name, blob_name) await blob.upload_blob(b"abc", overwrite=True) resp = await blob.set_legal_hold(True) @@ -3039,20 +3087,22 @@ async def test_blob_legal_hold(self, **kwargs): with pytest.raises(HttpResponseError): await blob.delete_blob() - assert resp['legal_hold'] - assert props['has_legal_hold'] + assert resp["legal_hold"] + assert props["has_legal_hold"] resp2 = await blob.set_legal_hold(False) props2 = await blob.get_blob_properties() - assert not resp2['legal_hold'] - assert not props2['has_legal_hold'] + assert not resp2["legal_hold"] + assert not props2["has_legal_hold"] if self.is_live: await blob.delete_immutability_policy() await blob.set_legal_hold(False) await blob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) @BlobPreparer() @recorded_by_proxy_async @@ -3063,36 +3113,37 @@ async def test_download_blob_with_immutability_policy(self, **kwargs): variables = kwargs.pop("variables", {}) await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, 
container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") blob = self.bsc.get_blob_client(container_name, blob_name) content = b"abcedfg" - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - await blob.upload_blob(content, - immutability_policy=immutability_policy, - legal_hold=True, - overwrite=True) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + await blob.upload_blob(content, immutability_policy=immutability_policy, legal_hold=True, overwrite=True) download_resp = await blob.download_blob() with pytest.raises(HttpResponseError): await blob.delete_blob() - assert download_resp.properties['has_legal_hold'] - assert download_resp.properties['immutability_policy']['expiry_time'] is not None - assert download_resp.properties['immutability_policy']['policy_mode'] is not None + assert download_resp.properties["has_legal_hold"] + assert download_resp.properties["immutability_policy"]["expiry_time"] is not None + assert download_resp.properties["immutability_policy"]["policy_mode"] is not None # Cleanup await blob.set_legal_hold(False) @@ -3102,7 +3153,9 @@ async def test_download_blob_with_immutability_policy(self, **kwargs): await blob.delete_immutability_policy() await blob.set_legal_hold(False) await blob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -3115,42 +3168,45 @@ async def test_list_blobs_with_immutability_policy(self, **kwargs): variables = kwargs.pop("variables", {}) await self._setup(versioned_storage_account_name, versioned_storage_account_key) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient, is_async=True) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + await mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) # Act - blob_name = self.get_resource_name('vlwblob') + blob_name = self.get_resource_name("vlwblob") container_client = self.bsc.get_container_client(container_name) blob = self.bsc.get_blob_client(container_name, blob_name) content = b"abcedfg" - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = 
ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - await blob.upload_blob(content, - immutability_policy=immutability_policy, - legal_hold=True, - overwrite=True) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + await blob.upload_blob(content, immutability_policy=immutability_policy, legal_hold=True, overwrite=True) blob_list = list() - async for blob_prop in container_client.list_blobs(include=['immutabilitypolicy', 'legalhold']): + async for blob_prop in container_client.list_blobs(include=["immutabilitypolicy", "legalhold"]): blob_list.append(blob_prop) - assert blob_list[0]['has_legal_hold'] - assert blob_list[0]['immutability_policy']['expiry_time'] is not None - assert blob_list[0]['immutability_policy']['policy_mode'] is not None + assert blob_list[0]["has_legal_hold"] + assert blob_list[0]["immutability_policy"]["expiry_time"] is not None + assert blob_list[0]["immutability_policy"]["policy_mode"] is not None if self.is_live: await blob.delete_immutability_policy() await blob.set_legal_hold(False) await blob.delete_blob() - await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + await mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -3181,22 +3237,22 @@ async def test_download_properties(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(blob_data) # Assert - stream = await blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") props = stream.properties data = await stream.readall() assert data is not None assert data == blob_data - assert props['creation_time'] is not None - assert props['content_settings'] is not None - assert props['size'] == len(blob_data) + assert props["creation_time"] is not None + assert props["content_settings"] is not None + assert props["size"] == len(blob_data) @BlobPreparer() @recorded_by_proxy_async @@ -3207,7 +3263,7 @@ async def test_blob_version_id_operations(self, **kwargs): await self._setup(versioned_storage_account_name, versioned_storage_account_key) container = self.bsc.get_container_client(self.container_name) blob_name = self.get_resource_name("utcontainer") - blob_data = b'abc' + blob_data = b"abc" blob_client = container.get_blob_client(blob_name) tags_a = {"color": "red"} tags_b = {"color": "yellow"} @@ -3215,31 +3271,36 @@ async def test_blob_version_id_operations(self, **kwargs): await blob_client.upload_blob(blob_data, overwrite=True) v1_props = await blob_client.get_blob_properties() - v1_blob = BlobClient(self.bsc.url, container_name=self.container_name, blob_name=blob_name, - version_id=v1_props['version_id'], credential=versioned_storage_account_key) + v1_blob = BlobClient( + self.bsc.url, + container_name=self.container_name, + blob_name=blob_name, + version_id=v1_props["version_id"], + credential=versioned_storage_account_key, + ) await blob_client.upload_blob(blob_data * 2, overwrite=True) v2_props = await blob_client.get_blob_properties() - v2_blob = container.get_blob_client(v2_props, 
version_id=v2_props['version_id']) + v2_blob = container.get_blob_client(v2_props, version_id=v2_props["version_id"]) await blob_client.upload_blob(blob_data * 3, overwrite=True) v3_props = await blob_client.get_blob_properties() await v1_blob.set_standard_blob_tier(StandardBlobTier.Cool) await v1_blob.set_blob_tags(tags_a) - await v2_blob.set_standard_blob_tier(StandardBlobTier.Cool, version_id=v3_props['version_id']) - await v1_blob.set_blob_tags(tags_c, version_id=v3_props['version_id']) + await v2_blob.set_standard_blob_tier(StandardBlobTier.Cool, version_id=v3_props["version_id"]) + await v1_blob.set_blob_tags(tags_c, version_id=v3_props["version_id"]) await v2_blob.set_standard_blob_tier(StandardBlobTier.Hot) await v2_blob.set_blob_tags(tags_b) # Assert assert await (await v1_blob.download_blob()).readall() == blob_data assert await (await v2_blob.download_blob()).readall() == blob_data * 2 - assert await (await v1_blob.download_blob(version_id=v3_props['version_id'])).readall() == blob_data * 3 + assert await (await v1_blob.download_blob(version_id=v3_props["version_id"])).readall() == blob_data * 3 assert await v1_blob.get_blob_tags() == tags_a assert await v2_blob.get_blob_tags() == tags_b - assert await v2_blob.get_blob_tags(version_id=v3_props['version_id']) == tags_c - await v1_blob.delete_blob(version_id=v2_props['version_id']) + assert await v2_blob.get_blob_tags(version_id=v3_props["version_id"]) == tags_c + await v1_blob.delete_blob(version_id=v2_props["version_id"]) assert await v1_blob.exists() is True - assert await v1_blob.exists(version_id=v2_props['version_id']) is False + assert await v1_blob.exists(version_id=v2_props["version_id"]) is False assert await blob_client.exists() is True @BlobPreparer() @@ -3255,8 +3316,9 @@ async def test_storage_account_audience_blob_service_client(self, **kwargs): # Act token_credential = self.get_credential(BlobServiceClient, is_async=True) bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), credential=token_credential, - audience=f'https://{storage_account_name}.blob.core.windows.net' + self.account_url(storage_account_name, "blob"), + credential=token_credential, + audience=f"https://{storage_account_name}.blob.core.windows.net", ) # Assert @@ -3278,8 +3340,11 @@ async def test_storage_account_audience_blob_client(self, **kwargs): # Act token_credential = self.get_credential(BlobClient, is_async=True) blob = BlobClient( - self.bsc.url, container_name=self.container_name, blob_name=blob_name, - credential=token_credential, audience=f'https://{storage_account_name}.blob.core.windows.net' + self.bsc.url, + container_name=self.container_name, + blob_name=blob_name, + credential=token_credential, + audience=f"https://{storage_account_name}.blob.core.windows.net", ) # Assert @@ -3298,11 +3363,11 @@ async def test_oauth_error_handling(self, **kwargs): creds = ClientSecretCredential( "00000000-0000-0000-0000-000000000000", "00000000-0000-0000-0000-000000000000", - "00000000-0000-0000-0000-000000000000" + 'a' + "00000000-0000-0000-0000-000000000000" + "a", ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=creds, retry_total=0) - container = bsc.get_container_client('testing') + container = bsc.get_container_client("testing") # Act with pytest.raises(ClientAuthenticationError): @@ -3317,7 +3382,7 @@ async def test_upload_blob_partial_stream(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) blob = 
self.bsc.get_container_client(self.container_name).get_blob_client(self._get_blob_reference()) - data = b'abcde' * 100 + data = b"abcde" * 100 stream = BytesIO(data) length = 207 @@ -3340,7 +3405,7 @@ async def test_upload_blob_partial_stream_chunked(self, **kwargs): self.bsc._config.max_block_size = 1024 blob = self.bsc.get_container_client(self.container_name).get_blob_client(self._get_blob_reference()) - data = b'abcde' * 1024 + data = b"abcde" * 1024 stream = BytesIO(data) length = 3000 @@ -3350,4 +3415,6 @@ async def test_upload_blob_partial_stream_chunked(self, **kwargs): # Assert result = await (await blob.download_blob()).readall() assert result == data[:length] -# ------------------------------------------------------------------------------ \ No newline at end of file + + +# ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_container.py b/sdk/storage/azure-storage-blob/tests/test_container.py index e633f10f5d17..a2e3bd4470ae 100644 --- a/sdk/storage/azure-storage-blob/tests/test_container.py +++ b/sdk/storage/azure-storage-blob/tests/test_container.py @@ -25,21 +25,21 @@ PremiumPageBlobTier, PublicAccess, ResourceTypes, - StandardBlobTier - ) + StandardBlobTier, +) from devtools_testutils import recorded_by_proxy, set_custom_default_matcher from devtools_testutils.storage import LogCaptured, StorageRecordedTestCase from settings.testcase import BlobPreparer -#------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'container' -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +TEST_CONTAINER_PREFIX = "container" +# ------------------------------------------------------------------------------ class TestStorageContainer(StorageRecordedTestCase): - #--Helpers----------------------------------------------------------------- + # --Helpers----------------------------------------------------------------- def _get_container_reference(self, prefix=TEST_CONTAINER_PREFIX): container_name = self.get_resource_name(prefix) return container_name @@ -53,7 +53,7 @@ def _create_container(self, bsc, prefix=TEST_CONTAINER_PREFIX): pass return container - #--Test cases for containers ----------------------------------------- + # --Test cases for containers ----------------------------------------- @BlobPreparer() @recorded_by_proxy def test_create_container(self, **kwargs): @@ -100,7 +100,7 @@ def test_create_container_with_public_access_container(self, **kwargs): # Act container = bsc.get_container_client(container_name) - created = container.create_container(public_access='container') + created = container.create_container(public_access="container") # Assert assert created @@ -117,15 +117,14 @@ def test_create_container_with_public_access_blob(self, **kwargs): # Act container = bsc.get_container_client(container_name) - created = container.create_container(public_access='blob') + created = container.create_container(public_access="blob") blob = container.get_blob_client("blob1") - blob.upload_blob(u'xyz') + blob.upload_blob("xyz") anonymous_service = BlobClient( - self.account_url(storage_account_name, "blob"), - container_name=container_name, - blob_name="blob1") + self.account_url(storage_account_name, "blob"), container_name=container_name, blob_name="blob1" + ) # Assert assert created @@ -139,7 +138,7 @@ def test_create_container_with_metadata(self, 
**kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container_name = self._get_container_reference() - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} # Act container = bsc.get_container_client(container_name) @@ -158,7 +157,7 @@ def test_container_exists_with_lease(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act exists = container.get_container_properties() @@ -193,8 +192,7 @@ def test_rename_container(self, **kwargs): @pytest.mark.skip(reason="Feature not yet enabled. Make sure to record this test once enabled.") @BlobPreparer() - def test_rename_container_with_container_client( - self, storage_account_name, storage_account_key): + def test_rename_container_with_container_client(self, storage_account_name, storage_account_key): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) old_name1 = self._get_container_reference(prefix="oldcontainer1") old_name2 = self._get_container_reference(prefix="oldcontainer2") @@ -242,7 +240,7 @@ def test_unicode_create_container_unicode_name(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container_name = u'啊齄丂狛狜' + container_name = "啊齄丂狛狜" container = bsc.get_container_client(container_name) # Act @@ -316,13 +314,11 @@ def test_list_containers_with_include_metadata(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} resp = container.set_container_metadata(metadata) # Act - containers = list(bsc.list_containers( - name_starts_with=container.container_name, - include_metadata=True)) + containers = list(bsc.list_containers(name_starts_with=container.container_name, include_metadata=True)) # Assert assert containers is not None @@ -341,12 +337,12 @@ def test_list_containers_with_public_access(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} resp = container.set_container_access_policy(signed_identifiers, public_access=PublicAccess.Blob) # Act @@ -368,7 +364,7 @@ def test_list_containers_with_num_results_and_marker(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - prefix = 'listcontainersync' + prefix = "listcontainersync" container_names = [] for i in range(0, 4): container_names.append(self._create_container(bsc, prefix + str(i)).container_name) @@ -379,8 +375,9 @@ def test_list_containers_with_num_results_and_marker(self, **kwargs): generator1 = bsc.list_containers(name_starts_with=prefix, results_per_page=2).by_page() containers1 = list(next(generator1)) - generator2 = bsc.list_containers( - name_starts_with=prefix, results_per_page=2).by_page(generator1.continuation_token) + generator2 = bsc.list_containers(name_starts_with=prefix, results_per_page=2).by_page( + generator1.continuation_token + ) containers2 = list(next(generator2)) # Assert @@ -408,7 +405,7 @@ def test_list_containers_account_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(service=True), permission=AccountSasPermissions(list=True), - expiry=datetime.utcnow() + timedelta(hours=3) + expiry=datetime.utcnow() + timedelta(hours=3), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=sas_token) @@ -429,7 +426,7 @@ def test_set_container_metadata(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container = self._create_container(bsc) # Act @@ -445,9 +442,9 @@ def test_set_container_metadata_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container = self._create_container(bsc) - lease_id = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act container.set_container_metadata(metadata, lease=lease_id) @@ -468,7 +465,7 @@ def test_set_container_metadata_with_non_existing_container(self, **kwargs): # Act with pytest.raises(ResourceNotFoundError): - container.set_container_metadata({'hello': 'world', 'number': '43'}) + container.set_container_metadata({"hello": "world", "number": "43"}) # Assert @@ -479,7 +476,7 @@ def test_get_container_metadata(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = self._create_container(bsc) container.set_container_metadata(metadata) @@ -496,10 +493,10 @@ def test_get_container_metadata_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = self._create_container(bsc) container.set_container_metadata(metadata) - lease_id = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act md = container.get_container_properties(lease=lease_id).metadata @@ -529,7 +526,7 @@ def test_get_container_properties(self, 
**kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = self._create_container(bsc) container.set_container_metadata(metadata) @@ -550,10 +547,10 @@ def test_get_container_properties_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = self._create_container(bsc) container.set_container_metadata(metadata) - lease_id = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act props = container.get_container_properties(lease=lease_id) @@ -562,9 +559,9 @@ def test_get_container_properties_with_lease_id(self, **kwargs): # Assert assert props is not None assert props.metadata == metadata - assert props.lease.duration == 'infinite' - assert props.lease.state == 'leased' - assert props.lease.status == 'locked' + assert props.lease.duration == "infinite" + assert props.lease.state == "leased" + assert props.lease.status == "locked" @BlobPreparer() @recorded_by_proxy @@ -580,8 +577,8 @@ def test_get_container_acl(self, **kwargs): # Assert assert acl is not None - assert acl.get('public_access') is None - assert len(acl.get('signed_identifiers')) == 0 + assert acl.get("public_access") is None + assert len(acl.get("signed_identifiers")) == 0 @BlobPreparer() @recorded_by_proxy @@ -591,14 +588,14 @@ def test_get_container_acl_with_lease_id(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - lease_id = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act acl = container.get_container_access_policy(lease=lease_id) # Assert assert acl is not None - assert acl.get('public_access') is None + assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy @@ -611,22 +608,22 @@ def test_set_container_acl(self, **kwargs): container = self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} response = container.set_container_access_policy(signed_identifier) - assert response.get('etag') is not None - assert response.get('last_modified') is not None + assert response.get("etag") is not None + assert response.get("last_modified") is not None # Assert acl = container.get_container_access_policy() assert acl is not None - assert 
len(acl.get('signed_identifiers')) == 1 - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 1 + assert acl.get("public_access") is None return variables @@ -641,18 +638,18 @@ def test_set_container_acl_with_one_signed_identifier(self, **kwargs): container = self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} response = container.set_container_access_policy(signed_identifier) # Assert - assert response.get('etag') is not None - assert response.get('last_modified') is not None + assert response.get("etag") is not None + assert response.get("last_modified") is not None return variables @@ -665,22 +662,22 @@ def test_set_container_acl_with_lease_id(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - lease_id = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifiers = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifiers = {"testid": access_policy} container.set_container_access_policy(signed_identifiers, lease=lease_id) # Assert acl = container.get_container_access_policy() assert acl is not None - assert acl.get('public_access') is None + assert acl.get("public_access") is None return variables @@ -695,12 +692,12 @@ def test_set_container_acl_with_public_access(self, **kwargs): container = self._create_container(bsc) # Act - container.set_container_access_policy(signed_identifiers=dict(), public_access='container') + container.set_container_access_policy(signed_identifiers=dict(), public_access="container") # Assert acl = container.get_container_access_policy() assert acl is not None - assert 'container' == acl.get('public_access') + assert "container" == acl.get("public_access") @BlobPreparer() @recorded_by_proxy @@ -717,8 +714,8 @@ def test_set_container_acl_with_empty_signed_identifiers(self, **kwargs): # Assert acl = container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 0 - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 0 + 
assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy @@ -728,7 +725,7 @@ def test_set_container_acl_with_empty_access_policy(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - identifier = {'empty': None} + identifier = {"empty": None} # Act container.set_container_access_policy(identifier) @@ -736,8 +733,8 @@ def test_set_container_acl_with_empty_access_policy(self, **kwargs): # Assert acl = container.get_container_access_policy() assert acl is not None - assert 'empty' == acl.get('signed_identifiers')[0].id - assert acl.get('signed_identifiers')[0].access_policy is None + assert "empty" == acl.get("signed_identifiers")[0].id + assert acl.get("signed_identifiers")[0].access_policy is None @BlobPreparer() @recorded_by_proxy @@ -750,19 +747,19 @@ def test_set_container_acl_with_signed_identifiers(self, **kwargs): container = self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow() - timedelta(minutes=1)) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - identifiers = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + identifiers = {"testid": access_policy} container.set_container_access_policy(identifiers) # Assert acl = container.get_container_access_policy() assert acl is not None - assert 'testid' == acl.get('signed_identifiers')[0].id - assert acl.get('public_access') is None + assert "testid" == acl.get("signed_identifiers")[0].id + assert acl.get("public_access") is None return variables @@ -782,10 +779,10 @@ def test_set_container_acl_with_empty_identifiers(self, **kwargs): # Assert acl = container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 2 - assert '0' == acl.get('signed_identifiers')[0].id - assert acl.get('signed_identifiers')[0].access_policy is None - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 2 + assert "0" == acl.get("signed_identifiers")[0].id + assert acl.get("signed_identifiers")[0].access_policy is None + assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy @@ -797,11 +794,11 @@ def test_set_container_acl_with_three_identifiers(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow() - timedelta(minutes=1)) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + 
permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) identifiers = {i: access_policy for i in range(3)} # Act @@ -809,14 +806,13 @@ def test_set_container_acl_with_three_identifiers(self, **kwargs): # Assert acl = container.get_container_access_policy() - assert 3 == len(acl.get('signed_identifiers')) - assert '0' == acl.get('signed_identifiers')[0].id - assert acl.get('signed_identifiers')[0].access_policy is not None - assert acl.get('public_access') is None + assert 3 == len(acl.get("signed_identifiers")) + assert "0" == acl.get("signed_identifiers")[0].id + assert acl.get("signed_identifiers")[0].access_policy is not None + assert acl.get("public_access") is None return variables - @BlobPreparer() @recorded_by_proxy def test_set_container_acl_too_many_ids(self, **kwargs): @@ -829,12 +825,15 @@ def test_set_container_acl_too_many_ids(self, **kwargs): # Act identifiers = dict() for i in range(0, 6): - identifiers['id{}'.format(i)] = AccessPolicy() + identifiers["id{}".format(i)] = AccessPolicy() # Assert with pytest.raises(ValueError) as e: container_name.set_container_access_policy(identifiers) - assert str(e.value.args[0]) == 'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.' + assert ( + str(e.value.args[0]) + == "Too many access policies provided. The server does not support setting more than 5 access policies on a single resource." + ) @BlobPreparer() @recorded_by_proxy @@ -846,7 +845,7 @@ def test_lease_container_acquire_and_release(self, **kwargs): container = self._create_container(bsc) # Act - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") lease.release() # Assert @@ -859,7 +858,7 @@ def test_lease_container_renew(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) self.sleep(10) lease_id_start = lease.id @@ -884,7 +883,7 @@ def test_lease_container_break_period(self, **kwargs): container = self._create_container(bsc) # Act - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert lease.break_lease(lease_break_period=5) @@ -900,7 +899,7 @@ def test_lease_container_break_released_lease_fails(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") lease.release() # Act @@ -919,13 +918,13 @@ def test_lease_container_with_duration(self, **kwargs): container = self._create_container(bsc) # Act - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert with pytest.raises(HttpResponseError): container.acquire_lease() self.sleep(17) - container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + 
container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") @BlobPreparer() @recorded_by_proxy @@ -937,7 +936,7 @@ def test_lease_container_twice(self, **kwargs): container = self._create_container(bsc) # Act - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert lease2 = container.acquire_lease(lease_id=lease.id) @@ -953,7 +952,7 @@ def test_lease_container_with_proposed_lease_id(self, **kwargs): container = self._create_container(bsc) # Act - proposed_lease_id = '55e97f64-73e8-4390-838d-d9e84a374321' + proposed_lease_id = "55e97f64-73e8-4390-838d-d9e84a374321" lease = container.acquire_lease(lease_id=proposed_lease_id) # Assert @@ -969,8 +968,8 @@ def test_lease_container_change_lease_id(self, **kwargs): container = self._create_container(bsc) # Act - lease_id = '29e0b239-ecda-4f69-bfa3-95f6af91464c' - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = "29e0b239-ecda-4f69-bfa3-95f6af91464c" + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") lease_id1 = lease.id lease.change(proposed_lease_id=lease_id) lease.renew() @@ -1013,7 +1012,7 @@ def test_delete_container_with_non_existing_container_fail_not_exist(self, **kwa container.delete_container() log_as_str = log_captured.getvalue() - #assert 'ERROR' in log_as_str + # assert 'ERROR' in log_as_str # Assert @@ -1025,7 +1024,7 @@ def test_delete_container_with_lease_id(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - lease = container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Act deleted = container.delete_container(lease=lease) @@ -1108,16 +1107,15 @@ def test_list_names(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - - container.get_blob_client('blob1').upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) + data = b"hello world" + container.get_blob_client("blob1").upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) # Act blobs = [b.name for b in container.list_blobs()] - assert blobs, ['blob1' == 'blob2'] + assert blobs, ["blob1" == "blob2"] @pytest.mark.playback_test_only @BlobPreparer() @@ -1128,9 +1126,9 @@ def test_list_blobs_contains_last_access_time(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - blob_client = container.get_blob_client('blob1') + blob_client = container.get_blob_client("blob1") blob_client.upload_blob(data, standard_blob_tier=StandardBlobTier.Archive) # Act @@ -1145,9 +1143,9 @@ def test_list_blobs_returns_rehydrate_priority(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - blob_client = container.get_blob_client('blob1') + blob_client = container.get_blob_client("blob1") blob_client.upload_blob(data, standard_blob_tier=StandardBlobTier.Archive) 
blob_client.set_standard_blob_tier(StandardBlobTier.Hot) @@ -1164,9 +1162,9 @@ def test_list_blobs_cold_tier(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - blob_client = container.get_blob_client('blob1') + blob_client = container.get_blob_client("blob1") blob_client.upload_blob(data, standard_blob_tier=StandardBlobTier.Cold) # Act @@ -1181,9 +1179,9 @@ def test_list_blobs(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - container.get_blob_client('blob1').upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) + data = b"hello world" + container.get_blob_client("blob1").upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) # Act blobs = list(container.list_blobs()) @@ -1192,10 +1190,10 @@ def test_list_blobs(self, **kwargs): assert blobs is not None assert len(blobs) >= 2 assert blobs[0] is not None - self.assertNamedItemInContainer(blobs, 'blob1') - self.assertNamedItemInContainer(blobs, 'blob2') + self.assertNamedItemInContainer(blobs, "blob1") + self.assertNamedItemInContainer(blobs, "blob2") assert blobs[0].size == 11 - assert blobs[1].content_settings.content_type == 'application/octet-stream' + assert blobs[1].content_settings.content_type == "application/octet-stream" assert blobs[0].creation_time is not None @BlobPreparer() @@ -1207,7 +1205,7 @@ def test_list_encoded_blobs(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) blob_name = "dir1/dir2/file\uFFFF.blob" container = self._create_container(bsc, prefix="cont1") - data = b'hello world' + data = b"hello world" bc = container.get_blob_client(blob_name) bc.upload_blob(data) props = bc.get_blob_properties() @@ -1225,14 +1223,14 @@ def test_list_blobs_with_object_replication_policy(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container = bsc.get_container_client('orp-source') - data = b'hello world' - b_c = container.get_blob_client('blob1') + container = bsc.get_container_client("orp-source") + data = b"hello world" + b_c = container.get_blob_client("blob1") b_c.upload_blob(data, overwrite=True) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} b_c.set_blob_metadata(metadata) - container.get_blob_client('blob2').upload_blob(data, overwrite=True) + container.get_blob_client("blob2").upload_blob(data, overwrite=True) # Act blobs_list = container.list_blobs() @@ -1252,10 +1250,10 @@ def test_list_blobs_leased_blob(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') + data = b"hello world" + blob1 = container.get_blob_client("blob1") blob1.upload_blob(data) - lease = blob1.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob1.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act resp = list(container.list_blobs()) @@ -1264,11 +1262,11 @@ def test_list_blobs_leased_blob(self, **kwargs): assert resp is not None assert len(resp) >= 1 assert resp[0] is not None - self.assertNamedItemInContainer(resp, 
'blob1') + self.assertNamedItemInContainer(resp, "blob1") assert resp[0].size == 11 - assert resp[0].lease.duration == 'infinite' - assert resp[0].lease.status == 'locked' - assert resp[0].lease.state == 'leased' + assert resp[0].lease.duration == "infinite" + assert resp[0].lease.status == "locked" + assert resp[0].lease.state == "leased" @BlobPreparer() @recorded_by_proxy @@ -1278,19 +1276,19 @@ def test_list_blobs_with_prefix(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - container.get_blob_client('blob_a1').upload_blob(data) - container.get_blob_client('blob_a2').upload_blob(data) - container.get_blob_client('blob_b1').upload_blob(data) + data = b"hello world" + container.get_blob_client("blob_a1").upload_blob(data) + container.get_blob_client("blob_a2").upload_blob(data) + container.get_blob_client("blob_b1").upload_blob(data) # Act - resp = list(container.list_blobs(name_starts_with='blob_a')) + resp = list(container.list_blobs(name_starts_with="blob_a")) # Assert assert resp is not None assert len(resp) == 2 - self.assertNamedItemInContainer(resp, 'blob_a1') - self.assertNamedItemInContainer(resp, 'blob_a2') + self.assertNamedItemInContainer(resp, "blob_a1") + self.assertNamedItemInContainer(resp, "blob_a2") @BlobPreparer() @recorded_by_proxy @@ -1300,12 +1298,11 @@ def test_list_blobs_with_num_results(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - container.get_blob_client('blob_a1').upload_blob(data) - container.get_blob_client('blob_a2').upload_blob(data) - container.get_blob_client('blob_a3').upload_blob(data) - container.get_blob_client('blob_b1').upload_blob(data) - + data = b"hello world" + container.get_blob_client("blob_a1").upload_blob(data) + container.get_blob_client("blob_a2").upload_blob(data) + container.get_blob_client("blob_a3").upload_blob(data) + container.get_blob_client("blob_b1").upload_blob(data) # Act blobs = list(next(container.list_blobs(results_per_page=2).by_page())) @@ -1313,8 +1310,8 @@ def test_list_blobs_with_num_results(self, **kwargs): # Assert assert blobs is not None assert len(blobs) == 2 - self.assertNamedItemInContainer(blobs, 'blob_a1') - self.assertNamedItemInContainer(blobs, 'blob_a2') + self.assertNamedItemInContainer(blobs, "blob_a1") + self.assertNamedItemInContainer(blobs, "blob_a2") @BlobPreparer() @recorded_by_proxy @@ -1324,22 +1321,22 @@ def test_list_blobs_with_include_snapshots(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') + data = b"hello world" + blob1 = container.get_blob_client("blob1") blob1.upload_blob(data) blob1.create_snapshot() - container.get_blob_client('blob2').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) # Act blobs = list(container.list_blobs(include="snapshots")) # Assert assert len(blobs) == 3 - assert blobs[0].name == 'blob1' + assert blobs[0].name == "blob1" assert blobs[0].snapshot is not None - assert blobs[1].name == 'blob1' + assert blobs[1].name == "blob1" assert blobs[1].snapshot is None - assert blobs[2].name == 'blob2' + assert blobs[2].name == "blob2" assert blobs[2].snapshot is None @BlobPreparer() @@ -1350,29 +1347,31 @@ def 
test_list_blobs_with_include_metadata(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - content_settings = ContentSettings( - content_language='spanish', - content_disposition='inline') - blob1 = container.get_blob_client('blob1') - blob1.upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '1', 'name': 'bob'}) + data = b"hello world" + content_settings = ContentSettings(content_language="spanish", content_disposition="inline") + blob1 = container.get_blob_client("blob1") + blob1.upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "1", "name": "bob"} + ) blob1.create_snapshot() - container.get_blob_client('blob2').upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '2', 'name': 'car'}) + container.get_blob_client("blob2").upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "2", "name": "car"} + ) # Act - blobs =list(container.list_blobs(include="metadata")) + blobs = list(container.list_blobs(include="metadata")) # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert blobs[0].metadata['number'] == '1' - assert blobs[0].metadata['name'] == 'bob' - assert blobs[1].name == 'blob2' - assert blobs[1].metadata['number'] == '2' - assert blobs[1].metadata['name'] == 'car' - assert blobs[1].content_settings.content_language == 'spanish' - assert blobs[1].content_settings.content_disposition == 'inline' + assert blobs[0].name == "blob1" + assert blobs[0].metadata["number"] == "1" + assert blobs[0].metadata["name"] == "bob" + assert blobs[1].name == "blob2" + assert blobs[1].metadata["number"] == "2" + assert blobs[1].metadata["name"] == "car" + assert blobs[1].content_settings.content_language == "spanish" + assert blobs[1].content_settings.content_disposition == "inline" @BlobPreparer() @recorded_by_proxy @@ -1382,34 +1381,38 @@ def test_list_blobs_include_deletedwithversion(self, **kwargs): bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key) container = self._create_container(bsc) - data = b'hello world' - content_settings = ContentSettings( - content_language='spanish', - content_disposition='inline') - blob1 = container.get_blob_client('blob1') - resp = blob1.upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '1', 'name': 'bob'}) - version_id_1 = resp['version_id'] + data = b"hello world" + content_settings = ContentSettings(content_language="spanish", content_disposition="inline") + blob1 = container.get_blob_client("blob1") + resp = blob1.upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "1", "name": "bob"} + ) + version_id_1 = resp["version_id"] blob1.upload_blob(b"abc", overwrite=True) root_content = b"cde" - root_version_id = blob1.upload_blob(root_content, overwrite=True)['version_id'] + root_version_id = blob1.upload_blob(root_content, overwrite=True)["version_id"] blob1.delete_blob() - container.get_blob_client('blob2').upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '2', 'name': 'car'}) - container.get_blob_client('blob3').upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '2', 'name': 'car'}) + container.get_blob_client("blob2").upload_blob( + data, overwrite=True, 
content_settings=content_settings, metadata={"number": "2", "name": "car"} + ) + container.get_blob_client("blob3").upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "2", "name": "car"} + ) # Act - blobs =list(container.list_blobs(include=["deletedwithversions"])) + blobs = list(container.list_blobs(include=["deletedwithversions"])) downloaded_root_content = blob1.download_blob(version_id=root_version_id).readall() downloaded_original_content = blob1.download_blob(version_id=version_id_1).readall() # Assert - assert blobs[0].name == 'blob1' + assert blobs[0].name == "blob1" assert blobs[0].has_versions_only assert root_content == downloaded_root_content assert data == downloaded_original_content - assert blobs[1].name == 'blob2' + assert blobs[1].name == "blob2" assert not blobs[1].has_versions_only - assert blobs[2].name == 'blob3' + assert blobs[2].name == "blob3" assert not blobs[2].has_versions_only @BlobPreparer() @@ -1420,22 +1423,22 @@ def test_list_blobs_with_include_uncommittedblobs(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') - blob1.stage_block('1', b'AAA') - blob1.stage_block('2', b'BBB') - blob1.stage_block('3', b'CCC') + data = b"hello world" + blob1 = container.get_blob_client("blob1") + blob1.stage_block("1", b"AAA") + blob1.stage_block("2", b"BBB") + blob1.stage_block("3", b"CCC") - blob2 = container.get_blob_client('blob2') - blob2.upload_blob(data, metadata={'number': '2', 'name': 'car'}) + blob2 = container.get_blob_client("blob2") + blob2.upload_blob(data, metadata={"number": "2", "name": "car"}) # Act blobs = list(container.list_blobs(include="uncommittedblobs")) # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert blobs[1].name == 'blob2' + assert blobs[0].name == "blob1" + assert blobs[1].name == "blob2" @BlobPreparer() @recorded_by_proxy @@ -1445,36 +1448,36 @@ def test_list_blobs_with_include_copy(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' - container.get_blob_client('blob1').upload_blob(data, metadata={'status': 'original'}) - sourceblob = 'https://{0}.blob.core.windows.net/{1}/blob1'.format( - storage_account_name, - container.container_name) + data = b"hello world" + container.get_blob_client("blob1").upload_blob(data, metadata={"status": "original"}) + sourceblob = "https://{0}.blob.core.windows.net/{1}/blob1".format( + storage_account_name, container.container_name + ) - blobcopy = container.get_blob_client('blob1copy') - blobcopy.start_copy_from_url(sourceblob, metadata={'status': 'copy'}) + blobcopy = container.get_blob_client("blob1copy") + blobcopy.start_copy_from_url(sourceblob, metadata={"status": "copy"}) # Act blobs = list(container.list_blobs(include="copy")) # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert blobs[1].name == 'blob1copy' + assert blobs[0].name == "blob1" + assert blobs[1].name == "blob1copy" assert blobs[1].blob_type == blobs[0].blob_type assert blobs[1].size == 11 - assert blobs[1].content_settings.content_type == 'application/octet-stream' + assert blobs[1].content_settings.content_type == "application/octet-stream" assert blobs[1].content_settings.cache_control == None assert blobs[1].content_settings.content_encoding == None assert 
blobs[1].content_settings.content_language == None assert blobs[1].content_settings.content_disposition == None assert blobs[1].content_settings.content_md5 != None - assert blobs[1].lease.status == 'unlocked' - assert blobs[1].lease.state == 'available' + assert blobs[1].lease.status == "unlocked" + assert blobs[1].lease.state == "available" assert blobs[1].copy.id != None assert blobs[1].copy.source == sourceblob - assert blobs[1].copy.status == 'success' - assert blobs[1].copy.progress == '11/11' + assert blobs[1].copy.status == "success" + assert blobs[1].copy.progress == "11/11" assert blobs[1].copy.completion_time != None @BlobPreparer() @@ -1485,12 +1488,12 @@ def test_list_blobs_with_delimiter(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - container.get_blob_client('a/blob1').upload_blob(data) - container.get_blob_client('a/blob2').upload_blob(data) - container.get_blob_client('b/blob3').upload_blob(data) - container.get_blob_client('blob4').upload_blob(data) + container.get_blob_client("a/blob1").upload_blob(data) + container.get_blob_client("a/blob2").upload_blob(data) + container.get_blob_client("b/blob3").upload_blob(data) + container.get_blob_client("blob4").upload_blob(data) # Act resp = list(container.walk_blobs()) @@ -1498,9 +1501,9 @@ def test_list_blobs_with_delimiter(self, **kwargs): # Assert assert resp is not None assert len(resp) == 3 - self.assertNamedItemInContainer(resp, 'a/') - self.assertNamedItemInContainer(resp, 'b/') - self.assertNamedItemInContainer(resp, 'blob4') + self.assertNamedItemInContainer(resp, "a/") + self.assertNamedItemInContainer(resp, "b/") + self.assertNamedItemInContainer(resp, "blob4") @BlobPreparer() @recorded_by_proxy @@ -1509,17 +1512,17 @@ def test_find_blobs_by_tags(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container = self._create_container(bsc, 'testfind') + container = self._create_container(bsc, "testfind") - data = b'hello world' + data = b"hello world" tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - other_tags = {'tag1' : 'other'} + other_tags = {"tag1": "other"} filter_expression = "tag1='firsttag' and tag2='secondtag'" - container.get_blob_client('blob1').upload_blob(data, tags=tags) - container.get_blob_client('blob2').upload_blob(data, tags=tags) - container.get_blob_client('blob3').upload_blob(data, tags=tags) - container.get_blob_client('blob4').upload_blob(data, tags=other_tags) + container.get_blob_client("blob1").upload_blob(data, tags=tags) + container.get_blob_client("blob2").upload_blob(data, tags=tags) + container.get_blob_client("blob3").upload_blob(data, tags=tags) + container.get_blob_client("blob4").upload_blob(data, tags=other_tags) if self.is_live: sleep(10) @@ -1534,9 +1537,9 @@ def test_find_blobs_by_tags(self, **kwargs): # Assert assert 2 == len(items_on_page1) assert 1 == len(items_on_page2) - assert len(items_on_page2[0]['tags']) == 2 - assert items_on_page2[0]['tags']['tag1'] == 'firsttag' - assert items_on_page2[0]['tags']['tag2'] == 'secondtag' + assert len(items_on_page2[0]["tags"]) == 2 + assert items_on_page2[0]["tags"]["tag1"] == "firsttag" + assert items_on_page2[0]["tags"]["tag2"] == "secondtag" @pytest.mark.live_test_only @BlobPreparer() @@ -1547,12 +1550,12 @@ def 
test_find_blobs_by_tags_container_sas(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" tags = {"tag1": "tagone", "tag2": "tagtwo", "tag3": "thirdtag"} filter_expression = "tag1='tagone' and tag2='tagtwo'" - container.get_blob_client('blob1').upload_blob(data, tags=tags) - container.get_blob_client('blob2').upload_blob(data, tags=tags) + container.get_blob_client("blob1").upload_blob(data, tags=tags) + container.get_blob_client("blob2").upload_blob(data, tags=tags) if self.is_live: sleep(10) @@ -1564,7 +1567,7 @@ def test_find_blobs_by_tags_container_sas(self, **kwargs): container.container_name, account_key=storage_account_key, permission=ContainerSasPermissions(filter_by_tags=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) container = ContainerClient.from_container_url(container.url, credential=sas_token) @@ -1589,21 +1592,21 @@ def test_delete_blobs_simple(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") blob_client1.upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) except: pass # Act response = container.delete_blobs( blob_client1.get_blob_properties(), - 'blob2', - 'blob3', + "blob2", + "blob3", ) response = list(response) assert len(response) == 3 @@ -1621,7 +1624,7 @@ def test_delete_blob_with_properties_versioning(self, **kwargs): container: ContainerClient = self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) blob_client.upload_blob(blob_data, overwrite=True) @@ -1632,15 +1635,17 @@ def test_delete_blob_with_properties_versioning(self, **kwargs): v3_props = blob_client.get_blob_properties() # Act - container.delete_blob(v2_props, version_id=v1_props['version_id']) + container.delete_blob(v2_props, version_id=v1_props["version_id"]) container.delete_blob(v2_props) # Assert with pytest.raises(HttpResponseError): deleted = container.get_blob_client(v1_props) deleted.get_blob_properties() - assert blob_client.get_blob_properties(version_id=v3_props['version_id']).get("version_id") == v3_props[ - 'version_id'] + assert ( + blob_client.get_blob_properties(version_id=v3_props["version_id"]).get("version_id") + == v3_props["version_id"] + ) @pytest.mark.live_test_only @BlobPreparer() @@ -1652,31 +1657,28 @@ def test_delete_blobs_with_version_id(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - blob = bsc.get_blob_client(container.container_name, 'blob1') + blob = bsc.get_blob_client(container.container_name, "blob1") blob.upload_blob(data, length=len(data)) - container.get_blob_client('blob2').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) except: pass # Act - blob = bsc.get_blob_client(container.container_name, 
'blob1') + blob = bsc.get_blob_client(container.container_name, "blob1") old_blob_version_id = blob.get_blob_properties().get("version_id") - blob.stage_block(block_id='1', data="Test Content") - blob.commit_block_list(['1']) + blob.stage_block(block_id="1", data="Test Content") + blob.commit_block_list(["1"]) new_blob_version_id = blob.get_blob_properties().get("version_id") assert old_blob_version_id != new_blob_version_id blob1_del_data = dict() - blob1_del_data['name'] = 'blob1' - blob1_del_data['version_id'] = old_blob_version_id + blob1_del_data["name"] = "blob1" + blob1_del_data["version_id"] = old_blob_version_id - response = container.delete_blobs( - blob1_del_data, - 'blob2' - ) + response = container.delete_blobs(blob1_del_data, "blob2") # Assert response = list(response) @@ -1696,7 +1698,7 @@ def test_delete_blobs_with_properties_versioning(self, **kwargs): container: ContainerClient = self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) blob_client.upload_blob(blob_data, overwrite=True) @@ -1707,10 +1709,7 @@ def test_delete_blobs_with_properties_versioning(self, **kwargs): v3_props = blob_client.get_blob_properties() # Act - response = container.delete_blobs( - v1_props, - v2_props - ) + response = container.delete_blobs(v1_props, v2_props) remaining_blob = container.get_blob_client(v3_props) # Assert @@ -1718,8 +1717,10 @@ def test_delete_blobs_with_properties_versioning(self, **kwargs): assert len(response) == 2 assert response[0].status_code == 202 assert response[1].status_code == 202 - assert remaining_blob.get_blob_properties(version_id=v3_props['version_id']).get("version_id") == v3_props[ - 'version_id'] + assert ( + remaining_blob.get_blob_properties(version_id=v3_props["version_id"]).get("version_id") + == v3_props["version_id"] + ) @pytest.mark.live_test_only @BlobPreparer() @@ -1736,26 +1737,26 @@ def test_batch_blobs_with_container_sas(self, **kwargs): container_name, account_key=storage_account_key, permission=ContainerSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) container_client = bsc.get_container_client(container_name) container_client.create_container() container = ContainerClient.from_container_url(container_client.url, credential=sas_token) - data = b'hello world' + data = b"hello world" try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") blob_client1.upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) except: pass # Act response = container.delete_blobs( blob_client1.get_blob_properties(), - 'blob2', - 'blob3', + "blob2", + "blob3", ) response = list(response) assert len(response) == 3 @@ -1773,14 +1774,14 @@ def test_delete_blobs_with_if_tags(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(blob_storage_account_name, "blob"), blob_storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") blob_client1.upload_blob(data, overwrite=True, tags=tags) 
- container.get_blob_client('blob2').upload_blob(data, overwrite=True, tags=tags) - container.get_blob_client('blob3').upload_blob(data, overwrite=True, tags=tags) + container.get_blob_client("blob2").upload_blob(data, overwrite=True, tags=tags) + container.get_blob_client("blob3").upload_blob(data, overwrite=True, tags=tags) except: pass @@ -1789,18 +1790,8 @@ def test_delete_blobs_with_if_tags(self, **kwargs): # Act with pytest.raises(PartialBatchErrorException): - container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag WRONG'" - ) - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag'" - ) + container.delete_blobs("blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag WRONG'") + response = container.delete_blobs("blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag'") response = list(response) assert len(response) == 3 assert response[0].status_code == 202 @@ -1820,41 +1811,35 @@ def test_delete_blobs_and_snapshot_using_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), sas_token) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" # blob with snapshot - blob_client1 = container.get_blob_client('bloba') + blob_client1 = container.get_blob_client("bloba") blob_client1.upload_blob(data, overwrite=True) snapshot = blob_client1.create_snapshot() - container.get_blob_client('blobb').upload_blob(data, overwrite=True) - container.get_blob_client('blobc').upload_blob(data, overwrite=True) + container.get_blob_client("blobb").upload_blob(data, overwrite=True) + container.get_blob_client("blobc").upload_blob(data, overwrite=True) # blob with lease - blob_client4 = container.get_blob_client('blobd') + blob_client4 = container.get_blob_client("blobd") blob_client4.upload_blob(data, overwrite=True) lease = blob_client4.acquire_lease() # Act blob_props = blob_client1.get_blob_properties() - blob_props.snapshot = snapshot['snapshot'] + blob_props.snapshot = snapshot["snapshot"] blob_props_d = dict() - blob_props_d['name'] = "blobd" - blob_props_d['delete_snapshots'] = "include" - blob_props_d['lease_id'] = lease.id + blob_props_d["name"] = "blobd" + blob_props_d["delete_snapshots"] = "include" + blob_props_d["lease_id"] = lease.id - response = container.delete_blobs( - blob_props, - 'blobb', - 'blobc', - blob_props_d, - timeout=3 - ) + response = container.delete_blobs(blob_props, "blobb", "blobc", blob_props_d, timeout=3) response = list(response) assert len(response) == 4 assert response[0].status_code == 202 @@ -1872,22 +1857,17 @@ def test_delete_blobs_simple_no_raise(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - container.get_blob_client('blob1').upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob1").upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) except: pass # 
Act - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) assert len(response) == 3 assert response[0].status_code == 202 assert response[1].status_code == 202 @@ -1903,27 +1883,22 @@ def test_delete_blobs_snapshot(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc, prefix="test") - data = b'hello world' + data = b"hello world" try: - blob1_client = container.get_blob_client('blob1') + blob1_client = container.get_blob_client("blob1") blob1_client.upload_blob(data) blob1_client.create_snapshot() - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) except: pass - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 4 # 3 blobs + 1 snapshot # Act try: - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - delete_snapshots='only' - ) + response = container.delete_blobs("blob1", "blob2", "blob3", delete_snapshots="only") except PartialBatchErrorException as err: parts = list(err.parts) assert len(parts) == 3 @@ -1931,7 +1906,7 @@ def test_delete_blobs_snapshot(self, **kwargs): assert parts[1].status_code == 404 # There was no snapshot assert parts[2].status_code == 404 # There was no snapshot - blobs = list(container.list_blobs(include='snapshots')) + blobs = list(container.list_blobs(include="snapshots")) assert len(blobs) == 3 # 3 blobs @pytest.mark.live_test_only @@ -1946,17 +1921,12 @@ def test_standard_blob_tier_set_tier_api_batch(self, **kwargs): tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] for tier in tiers: - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) - blob = container.get_blob_client('blob1') - data = b'hello world' + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) + blob = container.get_blob_client("blob1") + data = b"hello world" blob.upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) blob_ref = blob.get_blob_properties() assert blob_ref.blob_tier is not None @@ -1965,9 +1935,9 @@ def test_standard_blob_tier_set_tier_api_batch(self, **kwargs): parts = container.set_standard_blob_tier_blobs( tier, - 'blob1', - 'blob2', - 'blob3', + "blob1", + "blob2", + "blob3", ) parts = list(parts) @@ -1982,12 +1952,7 @@ def test_standard_blob_tier_set_tier_api_batch(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.live_test_only @BlobPreparer() @@ -2004,22 +1969,17 @@ def test_batch_set_standard_blob_tier_for_version(self, **kwargs): tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] for tier in tiers: - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - 
raise_on_any_failure=False - ) - blob = container.get_blob_client('blob1') - blob2 = container.get_blob_client('blob2') - blob3 = container.get_blob_client('blob3') - data = b'hello world' + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) + blob = container.get_blob_client("blob1") + blob2 = container.get_blob_client("blob2") + blob3 = container.get_blob_client("blob3") + data = b"hello world" resp1 = blob.upload_blob(data, overwrite=True) resp2 = blob2.upload_blob(data, overwrite=True) resp3 = blob3.upload_blob(data, overwrite=True) snapshot = blob3.create_snapshot() - data2 = b'abc' + data2 = b"abc" blob.upload_blob(data2, overwrite=True) blob2.upload_blob(data2, overwrite=True) blob3.upload_blob(data2, overwrite=True) @@ -2029,9 +1989,9 @@ def test_batch_set_standard_blob_tier_for_version(self, **kwargs): parts = container.set_standard_blob_tier_blobs( tier, prop, - {'name': 'blob2', 'version_id': resp2['version_id']}, - {'name': 'blob3', 'snapshot': snapshot['snapshot']}, - raise_on_any_failure=False + {"name": "blob2", "version_id": resp2["version_id"]}, + {"name": "blob3", "snapshot": snapshot["snapshot"]}, + raise_on_any_failure=False, ) parts = list(parts) @@ -2046,12 +2006,7 @@ def test_batch_set_standard_blob_tier_for_version(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.live_test_only @BlobPreparer() @@ -2065,11 +2020,11 @@ def test_standard_blob_tier_with_if_tags(self, **kwargs): tier = StandardBlobTier.Cool tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - blob = container.get_blob_client('blob1') - data = b'hello world' + blob = container.get_blob_client("blob1") + data = b"hello world" blob.upload_blob(data, overwrite=True, tags=tags) - container.get_blob_client('blob2').upload_blob(data, overwrite=True, tags=tags) - container.get_blob_client('blob3').upload_blob(data, overwrite=True, tags=tags) + container.get_blob_client("blob2").upload_blob(data, overwrite=True, tags=tags) + container.get_blob_client("blob3").upload_blob(data, overwrite=True, tags=tags) blob_ref = blob.get_blob_properties() assert blob_ref.blob_tier is not None @@ -2078,19 +2033,11 @@ def test_standard_blob_tier_with_if_tags(self, **kwargs): with pytest.raises(PartialBatchErrorException): container.set_standard_blob_tier_blobs( - tier, - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag WRONG'" + tier, "blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag WRONG'" ) parts = container.set_standard_blob_tier_blobs( - tier, - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag'" + tier, "blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag'" ) parts = list(parts) @@ -2105,12 +2052,7 @@ def test_standard_blob_tier_with_if_tags(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.live_test_only @BlobPreparer() @@ -2124,34 +2066,23 @@ def test_standard_blob_tier_set_tiers_with_sas(self, **kwargs): account_key=storage_account_key, 
resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), sas_token) container = self._create_container(bsc) tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] for tier in tiers: - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) - blob = container.get_blob_client('blob1') - data = b'hello world' + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) + blob = container.get_blob_client("blob1") + data = b"hello world" blob.upload_blob(data) - container.get_blob_client('blob2').upload_blob(data) - container.get_blob_client('blob3').upload_blob(data) + container.get_blob_client("blob2").upload_blob(data) + container.get_blob_client("blob3").upload_blob(data) blob_ref = blob.get_blob_properties() - parts = container.set_standard_blob_tier_blobs( - tier, - blob_ref, - 'blob2', - 'blob3', - timeout=5 - ) + parts = container.set_standard_blob_tier_blobs(tier, blob_ref, "blob2", "blob3", timeout=5) parts = list(parts) assert len(parts) == 3 @@ -2165,12 +2096,7 @@ def test_standard_blob_tier_set_tiers_with_sas(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - response = container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + response = container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.skip(reason="Wasn't able to get premium account with batch enabled") # once we have premium tests, still we don't want to test Py 2.7 @@ -2186,7 +2112,7 @@ def test_premium_tier_set_tier_api_batch(self, **kwargs): pbs = BlobServiceClient(url, credential=credential) try: - container_name = self.get_resource_name('utpremiumcontainer') + container_name = self.get_resource_name("utpremiumcontainer") container = pbs.get_container_client(container_name) if not self.is_playback(): @@ -2195,10 +2121,10 @@ def test_premium_tier_set_tier_api_batch(self, **kwargs): except ResourceExistsError: pass - pblob = container.get_blob_client('blob1') + pblob = container.get_blob_client("blob1") pblob.create_page_blob(1024) - container.get_blob_client('blob2').create_page_blob(1024) - container.get_blob_client('blob3').create_page_blob(1024) + container.get_blob_client("blob2").create_page_blob(1024) + container.get_blob_client("blob3").create_page_blob(1024) blob_ref = pblob.get_blob_properties() assert PremiumPageBlobTier.P10 == blob_ref.blob_tier @@ -2207,9 +2133,9 @@ def test_premium_tier_set_tier_api_batch(self, **kwargs): parts = container.set_premium_page_blob_tier_blobs( PremiumPageBlobTier.P50, - 'blob1', - 'blob2', - 'blob3', + "blob1", + "blob2", + "blob3", ) parts = list(parts) @@ -2219,7 +2145,6 @@ def test_premium_tier_set_tier_api_batch(self, **kwargs): assert parts[1].status_code in [200, 202] assert parts[2].status_code in [200, 202] - blob_ref2 = pblob.get_blob_properties() assert PremiumPageBlobTier.P50 == blob_ref2.blob_tier assert not blob_ref2.blob_tier_inferred @@ -2235,17 +2160,18 @@ def test_walk_blobs_with_delimiter(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - 
container.get_blob_client('a/blob1').upload_blob(data)
-        container.get_blob_client('a/blob2').upload_blob(data)
-        container.get_blob_client('b/c/blob3').upload_blob(data)
-        container.get_blob_client('blob4').upload_blob(data)
+        container.get_blob_client("a/blob1").upload_blob(data)
+        container.get_blob_client("a/blob2").upload_blob(data)
+        container.get_blob_client("b/c/blob3").upload_blob(data)
+        container.get_blob_client("blob4").upload_blob(data)
 
         blob_list = []
+
         def recursive_walk(prefix):
             for b in prefix:
-                if b.get('prefix'):
+                if b.get("prefix"):
                     recursive_walk(b)
                 else:
                     blob_list.append(b.name)
@@ -2255,7 +2181,7 @@ def recursive_walk(prefix):
 
         # Assert
         assert len(blob_list) == 4
-        assert blob_list, ['a/blob1', 'a/blob2', 'b/c/blob3' == 'blob4']
+        assert blob_list == ["a/blob1", "a/blob2", "b/c/blob3", "blob4"]
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -2265,22 +2191,22 @@ def test_walk_blobs_with_prefix_delimiter_versions(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key)
         container = self._create_container(bsc)
-        data = b'hello world'
+        data = b"hello world"
 
-        container.get_blob_client('a/blob1').upload_blob(data)
-        container.get_blob_client('a/blob2').upload_blob(data)
-        container.get_blob_client('b/blob3').upload_blob(data)
+        container.get_blob_client("a/blob1").upload_blob(data)
+        container.get_blob_client("a/blob2").upload_blob(data)
+        container.get_blob_client("b/blob3").upload_blob(data)
 
         # Act
-        prefix_list = list(container.walk_blobs(name_starts_with='a', delimiter='/', include=['versions']))
+        prefix_list = list(container.walk_blobs(name_starts_with="a", delimiter="/", include=["versions"]))
 
         # Assert
         assert len(prefix_list) == 1
         a = list(prefix_list[0])
         assert len(a) == 2
-        assert a[0].name == 'a/blob1'
+        assert a[0].name == "a/blob1"
         assert a[0].version_id
-        assert a[1].name == 'a/blob2'
+        assert a[1].name == "a/blob2"
         assert a[1].version_id
 
     @BlobPreparer()
@@ -2291,9 +2217,9 @@ def test_walk_blobs_cold_tier(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = self._create_container(bsc)
-        data = b'hello world'
+        data = b"hello world"
 
-        container.get_blob_client('blob1').upload_blob(data, standard_blob_tier=StandardBlobTier.Cold)
+        container.get_blob_client("blob1").upload_blob(data, standard_blob_tier=StandardBlobTier.Cold)
 
         # Act
         resp = list(container.walk_blobs())
@@ -2310,30 +2236,30 @@ def test_list_blobs_with_include_multiple(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = self._create_container(bsc)
-        data = b'hello world'
-        blob1 = container.get_blob_client('blob1')
-        blob1.upload_blob(data, metadata={'number': '1', 'name': 'bob'})
+        data = b"hello world"
+        blob1 = container.get_blob_client("blob1")
+        blob1.upload_blob(data, metadata={"number": "1", "name": "bob"})
         blob1.create_snapshot()
 
-        container.get_blob_client('blob2').upload_blob(data, metadata={'number': '2', 'name': 'car'})
+        container.get_blob_client("blob2").upload_blob(data, metadata={"number": "2", "name": "car"})
 
         # Act
         blobs = list(container.list_blobs(include=["snapshots", "metadata"]))
 
         # Assert
         assert len(blobs) == 3
-        assert blobs[0].name == 'blob1'
+        assert blobs[0].name == "blob1"
         assert blobs[0].snapshot is not None
-        assert blobs[0].metadata['number'] == '1'
-        assert blobs[0].metadata['name'] == 'bob'
-        assert blobs[1].name == 'blob1'
+        assert blobs[0].metadata["number"] == "1"
+        
assert blobs[0].metadata["name"] == "bob" + assert blobs[1].name == "blob1" assert blobs[1].snapshot is None - assert blobs[1].metadata['number'] == '1' - assert blobs[1].metadata['name'] == 'bob' - assert blobs[2].name == 'blob2' + assert blobs[1].metadata["number"] == "1" + assert blobs[1].metadata["name"] == "bob" + assert blobs[2].name == "blob2" assert blobs[2].snapshot is None - assert blobs[2].metadata['number'] == '2' - assert blobs[2].metadata['name'] == 'car' + assert blobs[2].metadata["number"] == "2" + assert blobs[2].metadata["name"] == "car" @pytest.mark.live_test_only @BlobPreparer() @@ -2344,8 +2270,8 @@ def test_shared_access_container(self, **kwargs): # SAS URL is calculated from storage key, so this test runs live only bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - blob_name = 'blob1' - data = b'hello world' + blob_name = "blob1" + data = b"hello world" blob = container.get_blob_client(blob_name) blob.upload_blob(data) @@ -2395,7 +2321,7 @@ def test_web_container_normal_operations_working(self, **kwargs): blob.upload_blob(blob_content) # get a blob - blob_data = blob.download_blob(encoding='utf-8') + blob_data = blob.download_blob(encoding="utf-8") assert blob is not None assert blob_data.readall() == blob_content @@ -2414,10 +2340,11 @@ def test_user_delegation_sas_for_container(self, **kwargs): # Arrange token_credential = self.get_credential(BlobServiceClient) service_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) - user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), - datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service_client.get_user_delegation_key( + datetime.utcnow(), datetime.utcnow() + timedelta(hours=1) + ) - container_client = service_client.create_container(self.get_resource_name('oauthcontainer')) + container_client = service_client.create_container(self.get_resource_name("oauthcontainer")) token = generate_container_sas( container_client.account_name, container_client.container_name, @@ -2427,13 +2354,13 @@ def test_user_delegation_sas_for_container(self, **kwargs): user_delegation_key=user_delegation_key, ) - blob_client = container_client.get_blob_client(self.get_resource_name('oauthblob')) + blob_client = container_client.get_blob_client(self.get_resource_name("oauthblob")) blob_content = self.get_random_text_data(1024) blob_client.upload_blob(blob_content, length=len(blob_content)) # Act new_blob_client = BlobClient.from_blob_url(blob_client.url, credential=token) - content = new_blob_client.download_blob(encoding='utf-8') + content = new_blob_client.download_blob(encoding="utf-8") # Assert assert blob_content == content.readall() @@ -2441,17 +2368,17 @@ def test_user_delegation_sas_for_container(self, **kwargs): def test_set_container_permission_from_string(self): # Arrange permission1 = ContainerSasPermissions(read=True, write=True) - permission2 = ContainerSasPermissions.from_string('wr') + permission2 = ContainerSasPermissions.from_string("wr") assert permission1.read == permission2.read assert permission1.write == permission2.write def test_set_container_permission(self): # Arrange - permission = ContainerSasPermissions.from_string('wrlx') + permission = ContainerSasPermissions.from_string("wrlx") assert permission.read == True assert permission.list == True assert permission.write == True - assert permission._str == 'rwxl' + assert permission._str == "rwxl" 
@BlobPreparer() @recorded_by_proxy @@ -2461,7 +2388,7 @@ def test_download_blob(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = self._create_container(bsc) - data = b'hello world' + data = b"hello world" blob_name = self.get_resource_name("blob") container.get_blob_client(blob_name).upload_blob(data) @@ -2481,7 +2408,7 @@ def test_download_blob_with_properties_versioning(self, **kwargs): container: ContainerClient = self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = b'abc' + blob_data = b"abc" blob_client = container.get_blob_client(blob_name) blob_client.upload_blob(blob_data, overwrite=True) @@ -2492,8 +2419,8 @@ def test_download_blob_with_properties_versioning(self, **kwargs): v3_props = blob_client.get_blob_properties() # Act - downloaded = container.download_blob(v2_props, version_id=v1_props['version_id']) - downloaded2 = container.download_blob(v2_props, version_id=v3_props['version_id']) + downloaded = container.download_blob(v2_props, version_id=v1_props["version_id"]) + downloaded2 = container.download_blob(v2_props, version_id=v3_props["version_id"]) # Assert assert downloaded.readall() == blob_data @@ -2505,18 +2432,21 @@ def test_download_blob_in_chunks_where_maxsinglegetsize_is_multiple_of_chunksize storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=1024, - max_chunk_get_size=512) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=1024, + max_chunk_get_size=512, + ) container = self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 + data = b"hello world python storage test chunks" * 1024 blob_name = self.get_resource_name("testiteratechunks") container.get_blob_client(blob_name).upload_blob(data, overwrite=True) # Act - downloader= container.download_blob(blob_name) - downloaded_data = b'' + downloader = container.download_blob(blob_name) + downloaded_data = b"" chunk_size_list = list() for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2538,7 +2468,7 @@ def test_get_blob_client_with_properties_versioning(self, **kwargs): container: ContainerClient = self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) # Act @@ -2551,22 +2481,24 @@ def test_get_blob_client_with_properties_versioning(self, **kwargs): blob_client.upload_blob(blob_data * 4, overwrite=True) v4_props = blob_client.get_blob_properties() - v1_blob_client = container.get_blob_client(blob=v1_props['name'], version_id=v1_props['version_id']) + v1_blob_client = container.get_blob_client(blob=v1_props["name"], version_id=v1_props["version_id"]) props1 = v1_blob_client.get_blob_properties() - v2_blob_client = container.get_blob_client(blob=v1_props, version_id=v2_props['version_id']) + v2_blob_client = container.get_blob_client(blob=v1_props, version_id=v2_props["version_id"]) props2 = v2_blob_client.get_blob_properties() - v3_blob_client = bsc.get_blob_client(container=container.container_name, blob=v2_props['name'], - version_id=v3_props['version_id']) + v3_blob_client = bsc.get_blob_client( + container=container.container_name, blob=v2_props["name"], version_id=v3_props["version_id"] + ) 
props3 = v3_blob_client.get_blob_properties() - v4_blob_client = bsc.get_blob_client(container=container.container_name, blob=v3_props, - version_id=v4_props['version_id']) + v4_blob_client = bsc.get_blob_client( + container=container.container_name, blob=v3_props, version_id=v4_props["version_id"] + ) props4 = v4_blob_client.get_blob_properties() # Assert - assert props1['version_id'] == v1_props['version_id'] - assert props2['version_id'] == v2_props['version_id'] - assert props3['version_id'] == v3_props['version_id'] - assert props4['version_id'] == v4_props['version_id'] + assert props1["version_id"] == v1_props["version_id"] + assert props2["version_id"] == v2_props["version_id"] + assert props3["version_id"] == v3_props["version_id"] + assert props4["version_id"] == v4_props["version_id"] @BlobPreparer() @recorded_by_proxy @@ -2574,11 +2506,14 @@ def test_download_blob_modified(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=38, - max_chunk_get_size=38) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=38, + max_chunk_get_size=38, + ) container = self._create_container(bsc, prefix="cont") - data = b'hello world python storage test chunks' * 5 + data = b"hello world python storage test chunks" * 5 blob_name = self.get_resource_name("testblob") blob = container.get_blob_client(blob_name) blob.upload_blob(data, overwrite=True) @@ -2595,19 +2530,23 @@ def test_download_blob_modified(self, **kwargs): @BlobPreparer() @recorded_by_proxy def test_download_blob_in_chunks_where_maxsinglegetsize_not_multiple_of_chunksize( - self, storage_account_name, storage_account_key): - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=1024, - max_chunk_get_size=666) + self, storage_account_name, storage_account_key + ): + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=1024, + max_chunk_get_size=666, + ) container = self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 - blob_name = self.get_resource_name("testiteratechunks") + data = b"hello world python storage test chunks" * 1024 + blob_name = self.get_resource_name("testiteratechunks") container.get_blob_client(blob_name).upload_blob(data, overwrite=True) # Act - downloader= container.download_blob(blob_name) - downloaded_data = b'' + downloader = container.download_blob(blob_name) + downloaded_data = b"" chunk_size_list = list() for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2625,18 +2564,21 @@ def test_download_blob_in_chunks_where_maxsinglegetsize_smallert_than_chunksize( storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=215, - max_chunk_get_size=512) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=215, + max_chunk_get_size=512, + ) container = self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 + data = b"hello world python storage test chunks" * 1024 blob_name = 
self.get_resource_name("testiteratechunks") container.get_blob_client(blob_name).upload_blob(data, overwrite=True) # Act - downloader= container.download_blob(blob_name) - downloaded_data = b'' + downloader = container.download_blob(blob_name) + downloaded_data = b"" chunk_size_list = list() for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2656,11 +2598,11 @@ def test_list_blob_names(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container: ContainerClient = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - container.get_blob_client('blob1').upload_blob(data, overwrite=True) - container.get_blob_client('blob2').upload_blob(data, overwrite=True) - container.get_blob_client('test1').upload_blob(data, overwrite=True) + container.get_blob_client("blob1").upload_blob(data, overwrite=True) + container.get_blob_client("blob2").upload_blob(data, overwrite=True) + container.get_blob_client("test1").upload_blob(data, overwrite=True) # Act all_blobs = list(container.list_blob_names()) @@ -2668,11 +2610,11 @@ def test_list_blob_names(self, **kwargs): # Assert assert len(all_blobs) == 3 - assert all_blobs[0] == 'blob1' - assert all_blobs[1] == 'blob2' - assert all_blobs[2] == 'test1' + assert all_blobs[0] == "blob1" + assert all_blobs[1] == "blob2" + assert all_blobs[2] == "test1" assert len(test_blobs) == 1 - assert test_blobs[0] == 'test1' + assert test_blobs[0] == "test1" @BlobPreparer() @recorded_by_proxy @@ -2682,11 +2624,11 @@ def test_list_blob_names_pagination(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container: ContainerClient = self._create_container(bsc) - data = b'hello world' + data = b"hello world" - container.get_blob_client('blob1').upload_blob(data, overwrite=True) - container.get_blob_client('blob2').upload_blob(data, overwrite=True) - container.get_blob_client('blob3').upload_blob(data, overwrite=True) + container.get_blob_client("blob1").upload_blob(data, overwrite=True) + container.get_blob_client("blob2").upload_blob(data, overwrite=True) + container.get_blob_client("blob3").upload_blob(data, overwrite=True) # Act blob_pages = container.list_blob_names(results_per_page=2).by_page() @@ -2697,10 +2639,10 @@ def test_list_blob_names_pagination(self, **kwargs): # Assert assert len(items_on_page1) == 2 - assert items_on_page1[0] == 'blob1' - assert items_on_page1[1] == 'blob2' + assert items_on_page1[0] == "blob1" + assert items_on_page1[1] == "blob2" assert len(items_on_page2) == 1 - assert items_on_page2[0] == 'blob3' + assert items_on_page2[0] == "blob3" @BlobPreparer() @recorded_by_proxy @@ -2709,16 +2651,18 @@ def test_storage_account_audience_container_client(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - cc = ContainerClient(self.account_url(storage_account_name, "blob"), 'testcont', storage_account_key) + cc = ContainerClient(self.account_url(storage_account_name, "blob"), "testcont", storage_account_key) cc.exists() # Act token_credential = self.get_credential(ContainerClient) cc = ContainerClient( - self.account_url(storage_account_name, "blob"), 'testcont', credential=token_credential, - audience=f'https://{storage_account_name}.blob.core.windows.net' + self.account_url(storage_account_name, "blob"), + "testcont", + credential=token_credential, + audience=f"https://{storage_account_name}.blob.core.windows.net", ) # Assert response = cc.exists() - assert 
response is not None \ No newline at end of file + assert response is not None diff --git a/sdk/storage/azure-storage-blob/tests/test_container_async.py b/sdk/storage/azure-storage-blob/tests/test_container_async.py index c5cb770d14e3..1daf32f9f5a8 100644 --- a/sdk/storage/azure-storage-blob/tests/test_container_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_container_async.py @@ -25,13 +25,9 @@ PublicAccess, ResourceTypes, StandardBlobTier, - StorageErrorCode - ) -from azure.storage.blob.aio import ( - BlobClient, - BlobServiceClient, - ContainerClient + StorageErrorCode, ) +from azure.storage.blob.aio import BlobClient, BlobServiceClient, ContainerClient from devtools_testutils import set_custom_default_matcher from devtools_testutils.aio import recorded_by_proxy_async @@ -39,14 +35,14 @@ from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase from settings.testcase import BlobPreparer -#------------------------------------------------------------------------------ -TEST_CONTAINER_PREFIX = 'acontainer' -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +TEST_CONTAINER_PREFIX = "acontainer" +# ------------------------------------------------------------------------------ class TestStorageContainerAsync(AsyncStorageRecordedTestCase): - #--Helpers----------------------------------------------------------------- + # --Helpers----------------------------------------------------------------- def _get_container_reference(self, prefix=TEST_CONTAINER_PREFIX): container_name = self.get_resource_name(prefix) return container_name @@ -66,7 +62,7 @@ async def _to_list(self, async_iterator): result.append(item) return result - #--Test cases for containers ----------------------------------------- + # --Test cases for containers ----------------------------------------- @BlobPreparer() @recorded_by_proxy_async async def test_create_container(self, **kwargs): @@ -113,7 +109,7 @@ async def test_create_container_with_public_access_container(self, **kwargs): # Act container = bsc.get_container_client(container_name) - created = await container.create_container(public_access='container') + created = await container.create_container(public_access="container") # Assert assert created @@ -130,15 +126,14 @@ async def test_create_container_with_public_access_blob(self, **kwargs): # Act container = bsc.get_container_client(container_name) - created = await container.create_container(public_access='blob') + created = await container.create_container(public_access="blob") blob = container.get_blob_client("blob1") - await blob.upload_blob(u'xyz') + await blob.upload_blob("xyz") anonymous_service = BlobClient( - self.account_url(storage_account_name, "blob"), - container_name=container_name, - blob_name="blob1") + self.account_url(storage_account_name, "blob"), container_name=container_name, blob_name="blob1" + ) # Assert assert created @@ -152,7 +147,7 @@ async def test_create_container_with_metadata(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container_name = self._get_container_reference() - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} # Act container = bsc.get_container_client(container_name) @@ -172,7 +167,7 @@ async def test_container_exists_with_lease(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) 
container = await self._create_container(bsc) - await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act exists = await container.get_container_properties() @@ -213,11 +208,14 @@ async def test_download_blob_modified(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=38, - max_chunk_get_size=38) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=38, + max_chunk_get_size=38, + ) container = await self._create_container(bsc, prefix="cont1") - data = b'hello world python storage test chunks' * 5 + data = b"hello world python storage test chunks" * 5 blob_name = self.get_resource_name("testblob") blob = container.get_blob_client(blob_name) await blob.upload_blob(data, overwrite=True) @@ -233,8 +231,7 @@ async def test_download_blob_modified(self, **kwargs): @pytest.mark.skip(reason="Feature not yet enabled. Make sure to record this test once enabled.") @BlobPreparer() - async def test_rename_container_with_container_client( - self, storage_account_name, storage_account_key): + async def test_rename_container_with_container_client(self, storage_account_name, storage_account_key): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) old_name1 = self._get_container_reference(prefix="oldcontainer1") old_name2 = self._get_container_reference(prefix="oldcontainer2") @@ -269,7 +266,7 @@ async def test_rename_container_with_source_lease(self, **kwargs): new_name = self._get_container_reference(prefix="new") container = bsc.get_container_client(old_name) await container.create_container() - container_lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + container_lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") with pytest.raises(HttpResponseError): await bsc._rename_container(name=old_name, new_name=new_name) with pytest.raises(HttpResponseError): @@ -285,7 +282,7 @@ async def test_unicode_create_container_unicode_name(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container_name = u'啊齄丂狛狜' + container_name = "啊齄丂狛狜" container = bsc.get_container_client(container_name) # Act @@ -309,7 +306,6 @@ async def test_list_containers(self, **kwargs): async for c in bsc.list_containers(): containers.append(c) - # Assert assert containers is not None assert len(containers) >= 1 @@ -367,14 +363,12 @@ async def test_list_containers_with_include_metadata(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} resp = await container.set_container_metadata(metadata) # Act containers = [] - async for c in bsc.list_containers( - name_starts_with=container.container_name, - include_metadata=True): + async for c in bsc.list_containers(name_starts_with=container.container_name, include_metadata=True): containers.append(c) # Assert @@ -394,12 +388,12 @@ async def test_list_containers_with_public_access(self, **kwargs): bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} resp = await container.set_container_access_policy(signed_identifier, public_access=PublicAccess.Blob) # Act @@ -423,7 +417,7 @@ async def test_list_containers_with_num_results_and_marker(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - prefix = 'listcontainerasync' + prefix = "listcontainerasync" container_names = [] for i in range(0, 4): cr = await self._create_container(bsc, prefix + str(i)) @@ -437,8 +431,9 @@ async def test_list_containers_with_num_results_and_marker(self, **kwargs): async for c in await generator1.__anext__(): containers1.append(c) - generator2 = bsc.list_containers( - name_starts_with=prefix, results_per_page=2).by_page(generator1.continuation_token) + generator2 = bsc.list_containers(name_starts_with=prefix, results_per_page=2).by_page( + generator1.continuation_token + ) containers2 = [] async for c in await generator2.__anext__(): containers2.append(c) @@ -468,7 +463,7 @@ async def test_list_containers_account_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(service=True), permission=AccountSasPermissions(list=True), - expiry=datetime.utcnow() + timedelta(hours=3) + expiry=datetime.utcnow() + timedelta(hours=3), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=sas_token) @@ -491,7 +486,7 @@ async def test_set_container_metadata(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container = await self._create_container(bsc) # Act @@ -508,9 +503,9 @@ async def test_set_container_metadata_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '43'} + metadata = {"hello": "world", "number": "43"} container = await self._create_container(bsc) - lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act await container.set_container_metadata(metadata, lease=lease_id) @@ -532,7 +527,7 @@ async def test_set_container_metadata_with_non_existing_container(self, **kwargs # Act with pytest.raises(ResourceNotFoundError): - await container.set_container_metadata({'hello': 'world', 'number': '43'}) + await container.set_container_metadata({"hello": 
"world", "number": "43"}) # Assert @@ -543,7 +538,7 @@ async def test_get_container_metadata(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = await self._create_container(bsc) await container.set_container_metadata(metadata) @@ -561,10 +556,10 @@ async def test_get_container_metadata_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = await self._create_container(bsc) await container.set_container_metadata(metadata) - lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act md = await container.get_container_properties(lease=lease_id) @@ -579,8 +574,7 @@ async def test_container_exists(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url( - storage_account_name, "blob"), storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container1 = await self._create_container(bsc, prefix="container1") container2_name = self._get_container_reference(prefix="container2") @@ -595,9 +589,8 @@ async def test_get_container_properties(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) + metadata = {"hello": "world", "number": "42"} container = await self._create_container(bsc) await container.set_container_metadata(metadata) @@ -618,10 +611,10 @@ async def test_get_container_properties_with_lease_id(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} container = await self._create_container(bsc) await container.set_container_metadata(metadata) - lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act props = await container.get_container_properties(lease=lease_id) @@ -630,9 +623,9 @@ async def test_get_container_properties_with_lease_id(self, **kwargs): # Assert assert props is not None assert props.metadata == metadata - assert props.lease.duration == 'infinite' - assert props.lease.state == 'leased' - assert props.lease.status == 'locked' + assert props.lease.duration == "infinite" + assert props.lease.state == "leased" + assert props.lease.status == "locked" @BlobPreparer() @recorded_by_proxy_async @@ -648,8 +641,8 @@ async def test_get_container_acl(self, **kwargs): # Assert assert acl is not None - assert acl.get('public_access') is None - assert len(acl.get('signed_identifiers')) == 0 + 
assert acl.get("public_access") is None + assert len(acl.get("signed_identifiers")) == 0 @BlobPreparer() @recorded_by_proxy_async @@ -659,42 +652,42 @@ async def test_get_container_acl_with_lease_id(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act acl = await container.get_container_access_policy(lease=lease_id) # Assert assert acl is not None - assert acl.get('public_access') is None + assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy_async async def test_set_container_acl(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - variables = kwargs.pop('variables', {}) + variables = kwargs.pop("variables", {}) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} response = await container.set_container_access_policy(signed_identifier) - assert response.get('etag') is not None - assert response.get('last_modified') is not None + assert response.get("etag") is not None + assert response.get("last_modified") is not None # Assert acl = await container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 1 - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 1 + assert acl.get("public_access") is None return variables @@ -709,18 +702,18 @@ async def test_set_container_acl_with_one_signed_identifier(self, **kwargs): container = await self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} response = await container.set_container_access_policy(signed_identifier) # Assert - assert response.get('etag') is not None - assert response.get('last_modified') is not None + assert response.get("etag") is not None + assert 
response.get("last_modified") is not None return variables @@ -733,21 +726,21 @@ async def test_set_container_acl_with_lease_id(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - lease_id = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease_id = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} await container.set_container_access_policy(signed_identifier, lease=lease_id) # Assert acl = await container.get_container_access_policy() assert acl is not None - assert acl.get('public_access') is None + assert acl.get("public_access") is None return variables @@ -763,18 +756,18 @@ async def test_set_container_acl_with_public_access(self, **kwargs): container = await self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow()) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - signed_identifier = {'testid': access_policy} - await container.set_container_access_policy(signed_identifier, public_access='container') + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow()) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + signed_identifier = {"testid": access_policy} + await container.set_container_access_policy(signed_identifier, public_access="container") # Assert acl = await container.get_container_access_policy() assert acl is not None - assert 'container' == acl.get('public_access') + assert "container" == acl.get("public_access") return variables @@ -793,8 +786,8 @@ async def test_set_container_acl_with_empty_signed_identifiers(self, **kwargs): # Assert acl = await container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 0 - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 0 + assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy_async @@ -807,19 +800,19 @@ async def test_set_container_acl_with_signed_identifiers(self, **kwargs): container = await self._create_container(bsc) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow() - timedelta(minutes=1)) - access_policy = 
AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) - identifiers = {'testid': access_policy} + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) + identifiers = {"testid": access_policy} await container.set_container_access_policy(identifiers) # Assert acl = await container.get_container_access_policy() assert acl is not None - assert 'testid' == acl.get('signed_identifiers')[0].id - assert acl.get('public_access') is None + assert "testid" == acl.get("signed_identifiers")[0].id + assert acl.get("public_access") is None return variables @@ -839,10 +832,10 @@ async def test_set_container_acl_with_empty_identifiers(self, **kwargs): # Assert acl = await container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 3 - assert '0' == acl.get('signed_identifiers')[0].id - assert acl.get('signed_identifiers')[0].access_policy is None - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 3 + assert "0" == acl.get("signed_identifiers")[0].id + assert acl.get("signed_identifiers")[0].access_policy is None + assert acl.get("public_access") is None @BlobPreparer() @recorded_by_proxy_async @@ -854,11 +847,11 @@ async def test_set_container_acl_with_three_identifiers(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - start_time = self.get_datetime_variable(variables, 'start_time', datetime.utcnow() - timedelta(minutes=1)) - access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True), - expiry=expiry_time, - start=start_time) + expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) + start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=1)) + access_policy = AccessPolicy( + permission=ContainerSasPermissions(read=True), expiry=expiry_time, start=start_time + ) identifiers = {i: access_policy for i in range(2)} # Act @@ -867,10 +860,10 @@ async def test_set_container_acl_with_three_identifiers(self, **kwargs): # Assert acl = await container.get_container_access_policy() assert acl is not None - assert len(acl.get('signed_identifiers')) == 2 - assert '0' == acl.get('signed_identifiers')[0].id - assert acl.get('signed_identifiers')[0].access_policy is not None - assert acl.get('public_access') is None + assert len(acl.get("signed_identifiers")) == 2 + assert "0" == acl.get("signed_identifiers")[0].id + assert acl.get("signed_identifiers")[0].access_policy is not None + assert acl.get("public_access") is None return variables @@ -886,12 +879,15 @@ async def test_set_container_acl_too_many_ids(self, **kwargs): # Act identifiers = dict() for i in range(0, 6): - identifiers['id{}'.format(i)] = AccessPolicy() + identifiers["id{}".format(i)] = AccessPolicy() # Assert with pytest.raises(ValueError) as e: await container_name.set_container_access_policy(identifiers) - assert str(e.value.args[0]) == 'Too many access policies provided. 
The server does not support setting more than 5 access policies on a single resource.' + assert ( + str(e.value.args[0]) + == "Too many access policies provided. The server does not support setting more than 5 access policies on a single resource." + ) @BlobPreparer() @recorded_by_proxy_async @@ -903,7 +899,7 @@ async def test_lease_container_acquire_and_release(self, **kwargs): container = await self._create_container(bsc) # Act - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") await lease.release() # Assert @@ -916,7 +912,7 @@ async def test_lease_container_renew(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) self.sleep(10) lease_id_start = lease.id @@ -941,7 +937,7 @@ async def test_lease_container_break_period(self, **kwargs): container = await self._create_container(bsc) # Act - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert await lease.break_lease(lease_break_period=5) @@ -957,7 +953,7 @@ async def test_lease_container_break_released_lease_fails(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") await lease.release() # Act @@ -976,13 +972,13 @@ async def test_lease_container_with_duration(self, **kwargs): container = await self._create_container(bsc) # Act - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert with pytest.raises(HttpResponseError): await container.acquire_lease() self.sleep(17) - await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") @BlobPreparer() @recorded_by_proxy_async @@ -994,7 +990,7 @@ async def test_lease_container_twice(self, **kwargs): container = await self._create_container(bsc) # Act - lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15) + lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15) # Assert lease2 = await container.acquire_lease(lease_id=lease.id) @@ -1010,7 +1006,7 @@ async def test_lease_container_with_proposed_lease_id(self, **kwargs): container = await self._create_container(bsc) # Act - proposed_lease_id = '55e97f64-73e8-4390-838d-d9e84a374321' + proposed_lease_id = "55e97f64-73e8-4390-838d-d9e84a374321" lease = await container.acquire_lease(lease_id=proposed_lease_id) # Assert @@ -1026,8 +1022,8 @@ async def test_lease_container_change_lease_id(self, **kwargs): container = await self._create_container(bsc) # Act - lease_id = '29e0b239-ecda-4f69-bfa3-95f6af91464c' - 
lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease_id = "29e0b239-ecda-4f69-bfa3-95f6af91464c"
+        lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")
         lease_id1 = lease.id
         await lease.change(proposed_lease_id=lease_id)
         await lease.renew()
@@ -1070,7 +1066,7 @@ async def test_delete_cntnr_w_nonexisting_cntnr_fail_not_exist(self, **kwargs):
             await container.delete_container()
 
             log_as_str = log_captured.getvalue()
-            #assert 'ERROR' in log_as_str
+            # assert 'ERROR' in log_as_str
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1080,7 +1076,7 @@ async def test_delete_container_with_lease_id(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = await self._create_container(bsc)
-        lease = await container.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', lease_duration=15)
+        lease = await container.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444", lease_duration=15)
 
         # Act
         deleted = await container.delete_container(lease=lease)
@@ -1129,18 +1125,17 @@ async def test_list_names(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = await self._create_container(bsc)
-        data = b'hello world'
-
-        await (container.get_blob_client('blob1')).upload_blob(data)
-        await (container.get_blob_client('blob2')).upload_blob(data)
+        data = b"hello world"
 
+        await container.get_blob_client("blob1").upload_blob(data)
+        await container.get_blob_client("blob2").upload_blob(data)
 
         # Act
         blobs = []
         async for b in container.list_blobs():
             blobs.append(b.name)
 
-        assert blobs, ['blob1' == 'blob2']
+        assert blobs == ["blob1", "blob2"]
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -1150,9 +1145,9 @@ async def test_list_blobs_returns_rehydrate_priority(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = await self._create_container(bsc)
-        data = b'hello world'
+        data = b"hello world"
 
-        blob_client = container.get_blob_client('blob1')
+        blob_client = container.get_blob_client("blob1")
         await blob_client.upload_blob(data, standard_blob_tier=StandardBlobTier.Archive)
         await blob_client.set_standard_blob_tier(StandardBlobTier.Hot)
@@ -1169,9 +1164,9 @@ async def test_list_blobs_cold_tier(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = await self._create_container(bsc)
-        data = b'hello world'
+        data = b"hello world"
 
-        blob_client = container.get_blob_client('blob1')
+        blob_client = container.get_blob_client("blob1")
         await blob_client.upload_blob(data, standard_blob_tier=StandardBlobTier.Cold)
 
         # Act
@@ -1187,10 +1182,10 @@ async def test_list_blobs(self, **kwargs):
         bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
         container = await self._create_container(bsc)
-        data = b'hello world'
-        cr0 = container.get_blob_client('blob1')
+        data = b"hello world"
+        cr0 = container.get_blob_client("blob1")
         await cr0.upload_blob(data)
-        cr1 = container.get_blob_client('blob2')
+        cr1 = container.get_blob_client("blob2")
         await cr1.upload_blob(data)
 
         # Act
@@ -1202,10 +1197,10 @@ async def test_list_blobs(self, **kwargs):
         assert blobs is not None
         assert len(blobs) >= 2
         assert blobs[0] is not None
-        self.assertNamedItemInContainer(blobs, 'blob1')
-        self.assertNamedItemInContainer(blobs, 'blob2')
+        self.assertNamedItemInContainer(blobs, "blob1")
+        
self.assertNamedItemInContainer(blobs, "blob2") assert blobs[0].size == 11 - assert blobs[1].content_settings.content_type == 'application/octet-stream' + assert blobs[1].content_settings.content_type == "application/octet-stream" assert blobs[0].creation_time is not None @pytest.mark.playback_test_only @@ -1216,14 +1211,14 @@ async def test_list_blobs_with_object_replication_policy(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container = bsc.get_container_client('orp-source') - data = b'hello world' - b_c = container.get_blob_client('blob3') + container = bsc.get_container_client("orp-source") + data = b"hello world" + b_c = container.get_blob_client("blob3") await b_c.upload_blob(data, overwrite=True) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} await b_c.set_blob_metadata(metadata) - await container.get_blob_client('blob4').upload_blob(data, overwrite=True) + await container.get_blob_client("blob4").upload_blob(data, overwrite=True) # Act blobs_list = container.list_blobs() @@ -1243,10 +1238,10 @@ async def test_list_blobs_leased_blob(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') + data = b"hello world" + blob1 = container.get_blob_client("blob1") await blob1.upload_blob(data) - lease = await blob1.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = await blob1.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act resp = [] @@ -1256,11 +1251,11 @@ async def test_list_blobs_leased_blob(self, **kwargs): assert resp is not None assert len(resp) >= 1 assert resp[0] is not None - self.assertNamedItemInContainer(resp, 'blob1') + self.assertNamedItemInContainer(resp, "blob1") assert resp[0].size == 11 - assert resp[0].lease.duration == 'infinite' - assert resp[0].lease.status == 'locked' - assert resp[0].lease.state == 'leased' + assert resp[0].lease.duration == "infinite" + assert resp[0].lease.status == "locked" + assert resp[0].lease.state == "leased" @BlobPreparer() @recorded_by_proxy_async @@ -1270,24 +1265,24 @@ async def test_list_blobs_with_prefix(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - c0 = container.get_blob_client('blob_a1') + data = b"hello world" + c0 = container.get_blob_client("blob_a1") await c0.upload_blob(data) - c1 = container.get_blob_client('blob_a2') + c1 = container.get_blob_client("blob_a2") await c1.upload_blob(data) - c2 = container.get_blob_client('blob_b1') + c2 = container.get_blob_client("blob_b1") await c2.upload_blob(data) # Act resp = [] - async for b in container.list_blobs(name_starts_with='blob_a'): + async for b in container.list_blobs(name_starts_with="blob_a"): resp.append(b) # Assert assert resp is not None assert len(resp) == 2 - self.assertNamedItemInContainer(resp, 'blob_a1') - self.assertNamedItemInContainer(resp, 'blob_a2') + self.assertNamedItemInContainer(resp, "blob_a1") + self.assertNamedItemInContainer(resp, "blob_a2") @BlobPreparer() @recorded_by_proxy_async @@ -1297,14 +1292,14 @@ async def test_list_blobs_with_num_results(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), 
storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - c0 = container.get_blob_client('blob_a1') + data = b"hello world" + c0 = container.get_blob_client("blob_a1") await c0.upload_blob(data) - c1 = container.get_blob_client('blob_a2') + c1 = container.get_blob_client("blob_a2") await c1.upload_blob(data) - c2 = container.get_blob_client('blob_a3') + c2 = container.get_blob_client("blob_a3") await c2.upload_blob(data) - c3 = container.get_blob_client('blob_b1') + c3 = container.get_blob_client("blob_b1") await c3.upload_blob(data) # Act @@ -1316,8 +1311,8 @@ async def test_list_blobs_with_num_results(self, **kwargs): # Assert assert blobs is not None assert len(blobs) == 2 - self.assertNamedItemInContainer(generator.current_page, 'blob_a1') - self.assertNamedItemInContainer(generator.current_page, 'blob_a2') + self.assertNamedItemInContainer(generator.current_page, "blob_a1") + self.assertNamedItemInContainer(generator.current_page, "blob_a2") @BlobPreparer() @recorded_by_proxy_async @@ -1327,11 +1322,11 @@ async def test_list_blobs_with_include_snapshots(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') + data = b"hello world" + blob1 = container.get_blob_client("blob1") await blob1.upload_blob(data) await blob1.create_snapshot() - await (container.get_blob_client('blob2')).upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) # Act blobs = [] @@ -1340,11 +1335,11 @@ async def test_list_blobs_with_include_snapshots(self, **kwargs): # Assert assert len(blobs) == 3 - assert blobs[0].name == 'blob1' + assert blobs[0].name == "blob1" assert blobs[0].snapshot is not None - assert blobs[1].name == 'blob1' + assert blobs[1].name == "blob1" assert blobs[1].snapshot is None - assert blobs[2].name == 'blob2' + assert blobs[2].name == "blob2" assert blobs[2].snapshot is None @BlobPreparer() @@ -1355,12 +1350,12 @@ async def test_list_blobs_with_include_metadata(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') - await blob1.upload_blob(data, metadata={'number': '1', 'name': 'bob'}) + data = b"hello world" + blob1 = container.get_blob_client("blob1") + await blob1.upload_blob(data, metadata={"number": "1", "name": "bob"}) await blob1.create_snapshot() - cr = container.get_blob_client('blob2') - await cr.upload_blob(data, metadata={'number': '2', 'name': 'car'}) + cr = container.get_blob_client("blob2") + await cr.upload_blob(data, metadata={"number": "2", "name": "car"}) # Act blobs = [] @@ -1369,12 +1364,12 @@ async def test_list_blobs_with_include_metadata(self, **kwargs): # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert blobs[0].metadata['number'] == '1' - assert blobs[0].metadata['name'] == 'bob' - assert blobs[1].name == 'blob2' - assert blobs[1].metadata['number'] == '2' - assert blobs[1].metadata['name'] == 'car' + assert blobs[0].name == "blob1" + assert blobs[0].metadata["number"] == "1" + assert blobs[0].metadata["name"] == "bob" + assert blobs[1].name == "blob2" + assert blobs[1].metadata["number"] == "2" + assert blobs[1].metadata["name"] == "car" @BlobPreparer() @recorded_by_proxy_async @@ -1384,21 +1379,25 @@ async def 
test_list_blobs_include_deletedwithversion(self, **kwargs): bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - content_settings = ContentSettings( - content_language='spanish', - content_disposition='inline') - blob1 = container.get_blob_client('blob1') - resp = await blob1.upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '1', 'name': 'bob'}) - version_id_1 = resp['version_id'] + data = b"hello world" + content_settings = ContentSettings(content_language="spanish", content_disposition="inline") + blob1 = container.get_blob_client("blob1") + resp = await blob1.upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "1", "name": "bob"} + ) + version_id_1 = resp["version_id"] await blob1.upload_blob(b"abc", overwrite=True) root_content = b"cde" - root_version_id = (await blob1.upload_blob(root_content, overwrite=True))['version_id'] + root_version_id = (await blob1.upload_blob(root_content, overwrite=True))["version_id"] # this will delete the root blob, while you can still access it through versioning await blob1.delete_blob() - await container.get_blob_client('blob2').upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '2', 'name': 'car'}) - await container.get_blob_client('blob3').upload_blob(data, overwrite=True, content_settings=content_settings, metadata={'number': '2', 'name': 'car'}) + await container.get_blob_client("blob2").upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "2", "name": "car"} + ) + await container.get_blob_client("blob3").upload_blob( + data, overwrite=True, content_settings=content_settings, metadata={"number": "2", "name": "car"} + ) # Act blobs = list() @@ -1410,13 +1409,13 @@ async def test_list_blobs_include_deletedwithversion(self, **kwargs): downloaded_original_content = await (await blob1.download_blob(version_id=version_id_1)).readall() # Assert - assert blobs[0].name == 'blob1' + assert blobs[0].name == "blob1" assert blobs[0].has_versions_only assert root_content == downloaded_root_content assert data == downloaded_original_content - assert blobs[1].name == 'blob2' + assert blobs[1].name == "blob2" assert not blobs[1].has_versions_only - assert blobs[2].name == 'blob3' + assert blobs[2].name == "blob3" assert not blobs[2].has_versions_only @BlobPreparer() @@ -1427,14 +1426,14 @@ async def test_list_blobs_with_include_uncommittedblobs(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') - await blob1.stage_block('1', b'AAA') - await blob1.stage_block('2', b'BBB') - await blob1.stage_block('3', b'CCC') + data = b"hello world" + blob1 = container.get_blob_client("blob1") + await blob1.stage_block("1", b"AAA") + await blob1.stage_block("2", b"BBB") + await blob1.stage_block("3", b"CCC") - blob2 = container.get_blob_client('blob2') - await blob2.upload_blob(data, metadata={'number': '2', 'name': 'car'}) + blob2 = container.get_blob_client("blob2") + await blob2.upload_blob(data, metadata={"number": "2", "name": "car"}) # Act blobs = [] @@ -1443,8 +1442,8 @@ async def test_list_blobs_with_include_uncommittedblobs(self, **kwargs): # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert 
blobs[1].name == 'blob2' + assert blobs[0].name == "blob1" + assert blobs[1].name == "blob2" @BlobPreparer() @recorded_by_proxy_async @@ -1454,14 +1453,14 @@ async def test_list_blobs_with_include_copy(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - await (container.get_blob_client('blob1')).upload_blob(data, metadata={'status': 'original'}) - sourceblob = 'https://{0}.blob.core.windows.net/{1}/blob1'.format( - storage_account_name, - container.container_name) + data = b"hello world" + await container.get_blob_client("blob1").upload_blob(data, metadata={"status": "original"}) + sourceblob = "https://{0}.blob.core.windows.net/{1}/blob1".format( + storage_account_name, container.container_name + ) - blobcopy = container.get_blob_client('blob1copy') - await blobcopy.start_copy_from_url(sourceblob, metadata={'status': 'copy'}) + blobcopy = container.get_blob_client("blob1copy") + await blobcopy.start_copy_from_url(sourceblob, metadata={"status": "copy"}) # Act blobs = [] @@ -1470,22 +1469,22 @@ async def test_list_blobs_with_include_copy(self, **kwargs): # Assert assert len(blobs) == 2 - assert blobs[0].name == 'blob1' - assert blobs[1].name == 'blob1copy' + assert blobs[0].name == "blob1" + assert blobs[1].name == "blob1copy" assert blobs[1].blob_type == blobs[0].blob_type assert blobs[1].size == 11 - assert blobs[1].content_settings.content_type == 'application/octet-stream' + assert blobs[1].content_settings.content_type == "application/octet-stream" assert blobs[1].content_settings.cache_control == None assert blobs[1].content_settings.content_encoding == None assert blobs[1].content_settings.content_language == None assert blobs[1].content_settings.content_disposition == None assert blobs[1].content_settings.content_md5 != None - assert blobs[1].lease.status == 'unlocked' - assert blobs[1].lease.state == 'available' + assert blobs[1].lease.status == "unlocked" + assert blobs[1].lease.state == "available" assert blobs[1].copy.id != None assert blobs[1].copy.source == sourceblob - assert blobs[1].copy.status == 'success' - assert blobs[1].copy.progress == '11/11' + assert blobs[1].copy.status == "success" + assert blobs[1].copy.progress == "11/11" assert blobs[1].copy.completion_time != None @BlobPreparer() @@ -1496,15 +1495,15 @@ async def test_list_blobs_with_delimiter(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - cr0 = container.get_blob_client('a/blob1') + cr0 = container.get_blob_client("a/blob1") await cr0.upload_blob(data) - cr1 = container.get_blob_client('a/blob2') + cr1 = container.get_blob_client("a/blob2") await cr1.upload_blob(data) - cr2 = container.get_blob_client('b/blob3') + cr2 = container.get_blob_client("b/blob3") await cr2.upload_blob(data) - cr4 = container.get_blob_client('blob4') + cr4 = container.get_blob_client("blob4") await cr4.upload_blob(data) # Act @@ -1515,9 +1514,9 @@ async def test_list_blobs_with_delimiter(self, **kwargs): # Assert assert resp is not None assert len(resp) == 3 - self.assertNamedItemInContainer(resp, 'a/') - self.assertNamedItemInContainer(resp, 'b/') - self.assertNamedItemInContainer(resp, 'blob4') + self.assertNamedItemInContainer(resp, "a/") + self.assertNamedItemInContainer(resp, "b/") + self.assertNamedItemInContainer(resp, "blob4") 
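The delimiter test above folds blob names such as "a/blob1" into virtual-directory prefixes ("a/", "b/"). A minimal standalone sketch of that hierarchical-listing pattern with the async ContainerClient.walk_blobs follows; the account URL, credential, and container name are illustrative placeholders, not values from this suite:

import asyncio

from azure.storage.blob.aio import ContainerClient


async def main():
    container = ContainerClient(
        "https://myaccount.blob.core.windows.net",  # placeholder account URL
        "mycontainer",  # placeholder container name
        credential="<account-key-or-sas>",  # placeholder credential
    )
    async with container:
        async for item in container.walk_blobs(delimiter="/"):
            if item.get("prefix"):
                # A BlobPrefix ("a/", "b/") is itself async-iterable and yields
                # the blobs under that virtual directory, the same recursion
                # test_walk_blobs_with_delimiter performs further below.
                async for blob in item:
                    print(item.name, "->", blob.name)
            else:
                print("top-level blob:", item.name)


asyncio.run(main())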
@BlobPreparer() @recorded_by_proxy_async @@ -1526,20 +1525,20 @@ async def test_find_blobs_by_tags(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) - container = await self._create_container(bsc, 'testfind') + container = await self._create_container(bsc, "testfind") - data = b'hello world' + data = b"hello world" tags = {"tag1": "tagone", "tag2": "tagtwo", "tag3": "tagthree"} - other_tags = {'tag1': 'other'} + other_tags = {"tag1": "other"} filter_expression = "tag1='tagone' and tag2='tagtwo'" - c1 = container.get_blob_client('blob1') + c1 = container.get_blob_client("blob1") await c1.upload_blob(data, tags=tags) - c2 = container.get_blob_client('blob2') + c2 = container.get_blob_client("blob2") await c2.upload_blob(data, tags=tags) - c3 = container.get_blob_client('blob3') + c3 = container.get_blob_client("blob3") await c3.upload_blob(data, tags=tags) - c4 = container.get_blob_client('blob4') + c4 = container.get_blob_client("blob4") await c4.upload_blob(data, tags=other_tags) if self.is_live: @@ -1559,9 +1558,9 @@ async def test_find_blobs_by_tags(self, **kwargs): # Assert assert 2 == len(items_on_page1) assert 1 == len(items_on_page2) - assert len(items_on_page2[0]['tags']) == 2 - assert items_on_page2[0]['tags']['tag1'] == 'tagone' - assert items_on_page2[0]['tags']['tag2'] == 'tagtwo' + assert len(items_on_page2[0]["tags"]) == 2 + assert items_on_page2[0]["tags"]["tag1"] == "tagone" + assert items_on_page2[0]["tags"]["tag2"] == "tagtwo" def test_batch_delete_empty_blob_list(self): container_client = ContainerClient("https://mystorageaccount.blob.core.windows.net", "container") @@ -1578,22 +1577,24 @@ async def test_delete_blobs_simple(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") await blob_client1.upload_blob(data) - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) except: pass # Act - response = await self._to_list(await container.delete_blobs( - await blob_client1.get_blob_properties(), - 'blob2', - 'blob3', - )) + response = await self._to_list( + await container.delete_blobs( + await blob_client1.get_blob_properties(), + "blob2", + "blob3", + ) + ) assert len(response) == 3 assert response[0].status_code == 202 assert response[1].status_code == 202 @@ -1609,7 +1610,7 @@ async def test_delete_blob_with_properties_versioning(self, **kwargs): container: ContainerClient = await self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) await blob_client.upload_blob(blob_data, overwrite=True) @@ -1620,15 +1621,16 @@ async def test_delete_blob_with_properties_versioning(self, **kwargs): v3_props = await blob_client.get_blob_properties() # Act - await container.delete_blob(v2_props, version_id=v1_props['version_id']) + await container.delete_blob(v2_props, version_id=v1_props["version_id"]) await container.delete_blob(v2_props) # Assert with pytest.raises(HttpResponseError): deleted = 
container.get_blob_client(v1_props) await deleted.get_blob_properties() - assert (await blob_client.get_blob_properties(version_id=v3_props['version_id'])).get("version_id") == v3_props[ - 'version_id'] + assert (await blob_client.get_blob_properties(version_id=v3_props["version_id"])).get("version_id") == v3_props[ + "version_id" + ] @pytest.mark.live_test_only @BlobPreparer() @@ -1645,27 +1647,25 @@ async def test_batch_blobs_with_container_sas(self, **kwargs): container_name, account_key=storage_account_key, permission=ContainerSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) container_client = bsc.get_container_client(container_name) await container_client.create_container() container = ContainerClient.from_container_url(container_client.url, credential=sas_token) - data = b'hello world' + data = b"hello world" try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") await blob_client1.upload_blob(data) - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) except: pass # Act - response = await self._to_list(await container.delete_blobs( - await blob_client1.get_blob_properties(), - 'blob2', - 'blob3' - )) + response = await self._to_list( + await container.delete_blobs(await blob_client1.get_blob_properties(), "blob2", "blob3") + ) assert len(response) == 3 assert response[0].status_code == 202 assert response[1].status_code == 202 @@ -1681,14 +1681,14 @@ async def test_delete_blobs_with_if_tags(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(blob_storage_account_name, "blob"), blob_storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} try: - blob_client1 = container.get_blob_client('blob1') + blob_client1 = container.get_blob_client("blob1") await blob_client1.upload_blob(data, overwrite=True, tags=tags) - await container.get_blob_client('blob2').upload_blob(data, overwrite=True, tags=tags) - await container.get_blob_client('blob3').upload_blob(data, overwrite=True, tags=tags) + await container.get_blob_client("blob2").upload_blob(data, overwrite=True, tags=tags) + await container.get_blob_client("blob3").upload_blob(data, overwrite=True, tags=tags) except: pass @@ -1697,17 +1697,9 @@ async def test_delete_blobs_with_if_tags(self, **kwargs): # Act with pytest.raises(PartialBatchErrorException): - await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag WRONG'" - ) + await container.delete_blobs("blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag WRONG'") blob_list = await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag'" + "blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag'" ) response = list() @@ -1732,43 +1724,39 @@ async def test_delete_blobs_and_snapshot_using_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + 
timedelta(hours=1), ) bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), sas_token) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" # blob with snapshot - blob_client1 = container.get_blob_client('bloba') + blob_client1 = container.get_blob_client("bloba") await blob_client1.upload_blob(data, overwrite=True) snapshot = await blob_client1.create_snapshot() - blob_client2 = container.get_blob_client('blobb') + blob_client2 = container.get_blob_client("blobb") await blob_client2.upload_blob(data, overwrite=True) - blob_client3 = container.get_blob_client('blobc') + blob_client3 = container.get_blob_client("blobc") await blob_client3.upload_blob(data, overwrite=True) # blob with lease - blob_client4 = container.get_blob_client('blobd') + blob_client4 = container.get_blob_client("blobd") await blob_client4.upload_blob(data, overwrite=True) lease = await blob_client4.acquire_lease() # Act blob_props = await blob_client1.get_blob_properties() - blob_props.snapshot = snapshot['snapshot'] + blob_props.snapshot = snapshot["snapshot"] blob_props_d = dict() - blob_props_d['name'] = "blobd" - blob_props_d['delete_snapshots'] = "include" - blob_props_d['lease_id'] = lease.id - - response = await self._to_list(await container.delete_blobs( - blob_props, - 'blobb', - 'blobc', - blob_props_d, - timeout=3 - )) + blob_props_d["name"] = "blobd" + blob_props_d["delete_snapshots"] = "include" + blob_props_d["lease_id"] = lease.id + + response = await self._to_list( + await container.delete_blobs(blob_props, "blobb", "blobc", blob_props_d, timeout=3) + ) response = list(response) assert len(response) == 4 assert response[0].status_code == 202 @@ -1786,22 +1774,19 @@ async def test_delete_blobs_simple_no_raise(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - await container.get_blob_client('blob1').upload_blob(data) - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob1").upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) except: pass # Act - response = await self._to_list(await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - )) + response = await self._to_list( + await container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) + ) assert len(response) == 3 assert response[0].status_code == 202 assert response[1].status_code == 202 @@ -1817,31 +1802,28 @@ async def test_delete_blobs_with_version_id(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - blob = bsc.get_blob_client(container.container_name, 'blob1') + blob = bsc.get_blob_client(container.container_name, "blob1") await blob.upload_blob(data, length=len(data)) - await container.get_blob_client('blob2').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) except: pass # Act - blob = bsc.get_blob_client(container.container_name, 'blob1') + blob = bsc.get_blob_client(container.container_name, "blob1") old_blob_version_id = (await 
blob.get_blob_properties()).get("version_id") - await blob.stage_block(block_id='1', data="Test Content") - await blob.commit_block_list(['1']) + await blob.stage_block(block_id="1", data="Test Content") + await blob.commit_block_list(["1"]) new_blob_version_id = (await blob.get_blob_properties()).get("version_id") assert old_blob_version_id != new_blob_version_id blob1_del_data = dict() - blob1_del_data['name'] = 'blob1' - blob1_del_data['version_id'] = old_blob_version_id + blob1_del_data["name"] = "blob1" + blob1_del_data["version_id"] = old_blob_version_id - response = await self._to_list(await container.delete_blobs( - blob1_del_data, - 'blob2' - )) + response = await self._to_list(await container.delete_blobs(blob1_del_data, "blob2")) # Assert assert len(response) == 2 @@ -1860,7 +1842,7 @@ async def test_delete_blobs_with_properties_versioning(self, **kwargs): container: ContainerClient = await self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) await blob_client.upload_blob(blob_data, overwrite=True) @@ -1871,18 +1853,16 @@ async def test_delete_blobs_with_properties_versioning(self, **kwargs): v3_props = await blob_client.get_blob_properties() # Act - response = await self._to_list(await container.delete_blobs( - v1_props, - v2_props - )) + response = await self._to_list(await container.delete_blobs(v1_props, v2_props)) remaining_blob = container.get_blob_client(v3_props) # Assert assert len(response) == 2 assert response[0].status_code == 202 assert response[1].status_code == 202 - assert (await remaining_blob.get_blob_properties(version_id=v3_props['version_id'])).get("version_id") == \ - v3_props['version_id'] + assert (await remaining_blob.get_blob_properties(version_id=v3_props["version_id"])).get( + "version_id" + ) == v3_props["version_id"] @pytest.mark.live_test_only @BlobPreparer() @@ -1894,27 +1874,24 @@ async def test_delete_blobs_snapshot(self, **kwargs): # Arrange bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" try: - blob1_client = container.get_blob_client('blob1') + blob1_client = container.get_blob_client("blob1") await blob1_client.upload_blob(data) await blob1_client.create_snapshot() - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) except: pass - blobs = await self._to_list(container.list_blobs(include='snapshots')) + blobs = await self._to_list(container.list_blobs(include="snapshots")) assert len(blobs) == 4 # 3 blobs + 1 snapshot # Act try: - response = await self._to_list(await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - delete_snapshots='only' - )) + response = await self._to_list( + await container.delete_blobs("blob1", "blob2", "blob3", delete_snapshots="only") + ) except PartialBatchErrorException as err: parts_list = err.parts assert len(parts_list) == 3 @@ -1922,7 +1899,7 @@ async def test_delete_blobs_snapshot(self, **kwargs): assert parts_list[1].status_code == 404 # There was no snapshot assert parts_list[2].status_code == 404 # There was no snapshot - blobs = await self._to_list(container.list_blobs(include='snapshots')) + blobs = await 
self._to_list(container.list_blobs(include="snapshots")) assert len(blobs) == 3 # 3 blobs @pytest.mark.live_test_only @@ -1938,23 +1915,25 @@ async def test_standard_blob_tier_set_tier_api_batch(self, **kwargs): for tier in tiers: try: - blob = container.get_blob_client('blob1') - data = b'hello world' + blob = container.get_blob_client("blob1") + data = b"hello world" await blob.upload_blob(data) - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) blob_ref = await blob.get_blob_properties() assert blob_ref.blob_tier is not None assert blob_ref.blob_tier_inferred assert blob_ref.blob_tier_change_time is None - parts = await self._to_list(await container.set_standard_blob_tier_blobs( - tier, - 'blob1', - 'blob2', - 'blob3', - )) + parts = await self._to_list( + await container.set_standard_blob_tier_blobs( + tier, + "blob1", + "blob2", + "blob3", + ) + ) assert len(parts) == 3 @@ -1969,9 +1948,9 @@ async def test_standard_blob_tier_set_tier_api_batch(self, **kwargs): finally: await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', + "blob1", + "blob2", + "blob3", ) @pytest.mark.live_test_only @@ -1986,11 +1965,11 @@ async def test_standard_blob_tier_with_if_tags(self, **kwargs): tier = StandardBlobTier.Cool tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} - blob = container.get_blob_client('blob1') - data = b'hello world' + blob = container.get_blob_client("blob1") + data = b"hello world" await blob.upload_blob(data, overwrite=True, tags=tags) - await container.get_blob_client('blob2').upload_blob(data, overwrite=True, tags=tags) - await container.get_blob_client('blob3').upload_blob(data, overwrite=True, tags=tags) + await container.get_blob_client("blob2").upload_blob(data, overwrite=True, tags=tags) + await container.get_blob_client("blob3").upload_blob(data, overwrite=True, tags=tags) blob_ref = await blob.get_blob_properties() assert blob_ref.blob_tier is not None @@ -1999,19 +1978,11 @@ async def test_standard_blob_tier_with_if_tags(self, **kwargs): with pytest.raises(PartialBatchErrorException): await container.set_standard_blob_tier_blobs( - tier, - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag WRONG'" + tier, "blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag WRONG'" ) parts_list = await container.set_standard_blob_tier_blobs( - tier, - 'blob1', - 'blob2', - 'blob3', - if_tags_match_condition="\"tag1\"='firsttag'" + tier, "blob1", "blob2", "blob3", if_tags_match_condition="\"tag1\"='firsttag'" ) parts = list() @@ -2028,12 +1999,7 @@ async def test_standard_blob_tier_with_if_tags(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + await container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.live_test_only @BlobPreparer() @@ -2047,33 +2013,30 @@ async def test_standard_blob_tier_set_tiers_with_sas(self, **kwargs): account_key=storage_account_key, resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), sas_token) container = await self._create_container(bsc) tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] for tier in tiers: - response = await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) - blob = container.get_blob_client('blob1') - data = b'hello world' + response = await container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) + blob = container.get_blob_client("blob1") + data = b"hello world" await blob.upload_blob(data) - await container.get_blob_client('blob2').upload_blob(data) - await container.get_blob_client('blob3').upload_blob(data) + await container.get_blob_client("blob2").upload_blob(data) + await container.get_blob_client("blob3").upload_blob(data) blob_ref = await blob.get_blob_properties() - parts = await self._to_list(await container.set_standard_blob_tier_blobs( + parts = await self._to_list( + await container.set_standard_blob_tier_blobs( tier, blob_ref, - 'blob2', - 'blob3', - )) + "blob2", + "blob3", + ) + ) parts = list(parts) assert len(parts) == 3 @@ -2087,12 +2050,7 @@ async def test_standard_blob_tier_set_tiers_with_sas(self, **kwargs): assert not blob_ref2.blob_tier_inferred assert blob_ref2.blob_tier_change_time is not None - response = await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', - raise_on_any_failure=False - ) + response = await container.delete_blobs("blob1", "blob2", "blob3", raise_on_any_failure=False) @pytest.mark.skip(reason="Wasn't able to get premium account with batch enabled") @BlobPreparer() @@ -2100,13 +2058,15 @@ async def test_premium_tier_set_tier_api_batch(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, transport=AiohttpTestTransport()) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, transport=AiohttpTestTransport() + ) url = self._get_premium_account_url() credential = self._get_premium_shared_key_credential() pbs = BlobServiceClient(url, credential=credential) try: - container_name = self.get_resource_name('utpremiumcontainer') + container_name = self.get_resource_name("utpremiumcontainer") container = pbs.get_container_client(container_name) if not self.is_playback(): @@ -2115,22 +2075,24 @@ async def test_premium_tier_set_tier_api_batch(self, **kwargs): except ResourceExistsError: pass - pblob = container.get_blob_client('blob1') + pblob = container.get_blob_client("blob1") await pblob.create_page_blob(1024) - await container.get_blob_client('blob2').create_page_blob(1024) - await container.get_blob_client('blob3').create_page_blob(1024) + await container.get_blob_client("blob2").create_page_blob(1024) + await container.get_blob_client("blob3").create_page_blob(1024) blob_ref = await pblob.get_blob_properties() assert PremiumPageBlobTier.P10 == blob_ref.blob_tier assert blob_ref.blob_tier is not None assert blob_ref.blob_tier_inferred - parts = await self._to_list(container.set_premium_page_blob_tier_blobs( - PremiumPageBlobTier.P50, - 'blob1', - 'blob2', - 'blob3', - )) + parts = await self._to_list( + container.set_premium_page_blob_tier_blobs( + PremiumPageBlobTier.P50, + "blob1", + "blob2", + "blob3", + ) + ) assert len(parts) == 3 @@ -2138,16 +2100,15 @@ async def test_premium_tier_set_tier_api_batch(self, **kwargs): assert 
parts[1].status_code in [200, 202] assert parts[2].status_code in [200, 202] - blob_ref2 = await pblob.get_blob_properties() assert PremiumPageBlobTier.P50 == blob_ref2.blob_tier assert not blob_ref2.blob_tier_inferred finally: await container.delete_blobs( - 'blob1', - 'blob2', - 'blob3', + "blob1", + "blob2", + "blob3", ) @BlobPreparer() @@ -2158,21 +2119,22 @@ async def test_walk_blobs_with_delimiter(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - cr0 = container.get_blob_client('a/blob1') + cr0 = container.get_blob_client("a/blob1") await cr0.upload_blob(data) - cr1 = container.get_blob_client('a/blob2') + cr1 = container.get_blob_client("a/blob2") await cr1.upload_blob(data) - cr2 = container.get_blob_client('b/c/blob3') + cr2 = container.get_blob_client("b/c/blob3") await cr2.upload_blob(data) - cr3 = container.get_blob_client('blob4') + cr3 = container.get_blob_client("blob4") await cr3.upload_blob(data) blob_list = [] + async def recursive_walk(prefix): async for b in prefix: - if b.get('prefix'): + if b.get("prefix"): await recursive_walk(b) else: blob_list.append(b.name) @@ -2182,7 +2144,7 @@ async def recursive_walk(prefix): # Assert assert len(blob_list) == 4 - assert blob_list, ['a/blob1', 'a/blob2', 'b/c/blob3' == 'blob4'] + assert blob_list == ["a/blob1", "a/blob2", "b/c/blob3", "blob4"] @BlobPreparer() @recorded_by_proxy_async @@ -2192,25 +2154,27 @@ async def test_walk_blobs_with_prefix_delimiter_versions(self, **kwargs): bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), versioned_storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - c0 = container.get_blob_client('a/blob1') + c0 = container.get_blob_client("a/blob1") await c0.upload_blob(data) - c1 = container.get_blob_client('a/blob2') + c1 = container.get_blob_client("a/blob2") await c1.upload_blob(data) - c2 = container.get_blob_client('b/blob3') + c2 = container.get_blob_client("b/blob3") await c2.upload_blob(data) # Act - prefix_list = await self._to_list(container.walk_blobs(name_starts_with='a', delimiter='/', include=['versions'])) + prefix_list = await self._to_list( + container.walk_blobs(name_starts_with="a", delimiter="/", include=["versions"]) + ) # Assert assert len(prefix_list) == 1 a = await self._to_list(prefix_list[0]) assert len(a) == 2 - assert a[0].name == 'a/blob1' + assert a[0].name == "a/blob1" assert a[0].version_id - assert a[1].name == 'a/blob2' + assert a[1].name == "a/blob2" assert a[1].version_id @BlobPreparer() @@ -2221,9 +2185,9 @@ async def test_walk_blobs_cold_tier(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - await container.get_blob_client('blob1').upload_blob(data, standard_blob_tier=StandardBlobTier.Cold) + await container.get_blob_client("blob1").upload_blob(data, standard_blob_tier=StandardBlobTier.Cold) # Act resp = [] @@ -2241,13 +2205,13 @@ async def test_list_blobs_with_include_multiple(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' - blob1 = container.get_blob_client('blob1') - await blob1.upload_blob(data, metadata={'number': '1', 'name': 
'bob'}) + data = b"hello world" + blob1 = container.get_blob_client("blob1") + await blob1.upload_blob(data, metadata={"number": "1", "name": "bob"}) await blob1.create_snapshot() - client = container.get_blob_client('blob2') - await client.upload_blob(data, metadata={'number': '2', 'name': 'car'}) + client = container.get_blob_client("blob2") + await client.upload_blob(data, metadata={"number": "2", "name": "car"}) # Act blobs = [] @@ -2256,18 +2220,18 @@ async def test_list_blobs_with_include_multiple(self, **kwargs): # Assert assert len(blobs) == 3 - assert blobs[0].name == 'blob1' + assert blobs[0].name == "blob1" assert blobs[0].snapshot is not None - assert blobs[0].metadata['number'] == '1' - assert blobs[0].metadata['name'] == 'bob' - assert blobs[1].name == 'blob1' + assert blobs[0].metadata["number"] == "1" + assert blobs[0].metadata["name"] == "bob" + assert blobs[1].name == "blob1" assert blobs[1].snapshot is None - assert blobs[1].metadata['number'] == '1' - assert blobs[1].metadata['name'] == 'bob' - assert blobs[2].name == 'blob2' + assert blobs[1].metadata["number"] == "1" + assert blobs[1].metadata["name"] == "bob" + assert blobs[2].name == "blob2" assert blobs[2].snapshot is None - assert blobs[2].metadata['number'] == '2' - assert blobs[2].metadata['name'] == 'car' + assert blobs[2].metadata["number"] == "2" + assert blobs[2].metadata["name"] == "car" @pytest.mark.live_test_only @BlobPreparer() @@ -2278,8 +2242,8 @@ async def test_shared_access_container(self, **kwargs): # SAS URL is calculated from storage key, so this test runs live only bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - blob_name = 'blob1' - data = b'hello world' + blob_name = "blob1" + data = b"hello world" blob = container.get_blob_client(blob_name) await blob.upload_blob(data) @@ -2331,7 +2295,7 @@ async def test_web_container_normal_operations_working(self, **kwargs): # get a blob blob_data = await (await blob.download_blob()).readall() assert blob is not None - assert blob_data.decode('utf-8') == blob_content + assert blob_data.decode("utf-8") == blob_content finally: # delete container @@ -2345,7 +2309,7 @@ async def test_download_blob(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" blob_name = self.get_resource_name("blob") blob = container.get_blob_client(blob_name) @@ -2366,7 +2330,7 @@ async def test_download_blob_with_properties_versioning(self, **kwargs): container: ContainerClient = await self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = b'abc' + blob_data = b"abc" blob_client = container.get_blob_client(blob_name) await blob_client.upload_blob(blob_data, overwrite=True) @@ -2377,8 +2341,8 @@ async def test_download_blob_with_properties_versioning(self, **kwargs): v3_props = await blob_client.get_blob_properties() # Act - downloaded = await container.download_blob(v2_props, version_id=v1_props['version_id']) - downloaded2 = await container.download_blob(v2_props, version_id=v3_props['version_id']) + downloaded = await container.download_blob(v2_props, version_id=v1_props["version_id"]) + downloaded2 = await container.download_blob(v2_props, version_id=v3_props["version_id"]) # Assert assert (await downloaded.readall()) == blob_data @@ -2390,18 +2354,21 @@ async def 
test_download_blob_in_chunks_where_maxsinglegetsize_is_multiple_of_chu storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=1024, - max_chunk_get_size=512) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=1024, + max_chunk_get_size=512, + ) container = await self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 + data = b"hello world python storage test chunks" * 1024 blob_name = self.get_resource_name("testiteratechunks") await container.get_blob_client(blob_name).upload_blob(data, overwrite=True) # Act downloader = await container.download_blob(blob_name) - downloaded_data = b'' + downloaded_data = b"" chunk_size_list = list() async for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2419,18 +2386,21 @@ async def test_download_blob_in_chunks_where_maxsinglegetsize_not_multiple_of_ch storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=1024, - max_chunk_get_size=666) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=1024, + max_chunk_get_size=666, + ) container = await self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 + data = b"hello world python storage test chunks" * 1024 blob_name = self.get_resource_name("testiteratechunks") await container.get_blob_client(blob_name).upload_blob(data, overwrite=True) # Act - downloader= await container.download_blob(blob_name) - downloaded_data = b'' + downloader = await container.download_blob(blob_name) + downloaded_data = b"" chunk_size_list = list() async for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2448,18 +2418,21 @@ async def test_download_blob_in_chunks_where_maxsinglegetsize_smallert_than_chun storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, - max_single_get_size=215, - max_chunk_get_size=512) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + storage_account_key, + max_single_get_size=215, + max_chunk_get_size=512, + ) container = await self._create_container(bsc) - data = b'hello world python storage test chunks' * 1024 + data = b"hello world python storage test chunks" * 1024 blob_name = self.get_resource_name("testiteratechunks") blob_client = container.get_blob_client(blob_name) await blob_client.upload_blob(data, overwrite=True) downloader = await container.download_blob(blob_name) - downloaded_data = b'' + downloaded_data = b"" chunk_size_list = list() async for chunk in downloader.chunks(): chunk_size_list.append(len(chunk)) @@ -2479,11 +2452,11 @@ async def test_list_blob_names(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container: ContainerClient = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - await (container.get_blob_client('blob1')).upload_blob(data, overwrite=True) - await (container.get_blob_client('blob2')).upload_blob(data, 
overwrite=True) - await (container.get_blob_client('test1')).upload_blob(data, overwrite=True) + await container.get_blob_client("blob1").upload_blob(data, overwrite=True) + await container.get_blob_client("blob2").upload_blob(data, overwrite=True) + await container.get_blob_client("test1").upload_blob(data, overwrite=True) # Act all_blobs = [] @@ -2496,11 +2469,11 @@ async def test_list_blob_names(self, **kwargs): # Assert assert len(all_blobs) == 3 - assert all_blobs[0] == 'blob1' - assert all_blobs[1] == 'blob2' - assert all_blobs[2] == 'test1' + assert all_blobs[0] == "blob1" + assert all_blobs[1] == "blob2" + assert all_blobs[2] == "test1" assert len(test_blobs) == 1 - assert test_blobs[0] == 'test1' + assert test_blobs[0] == "test1" @BlobPreparer() @recorded_by_proxy_async @@ -2510,11 +2483,11 @@ async def test_list_blob_names_pagination(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) container: ContainerClient = await self._create_container(bsc) - data = b'hello world' + data = b"hello world" - await (container.get_blob_client('blob1')).upload_blob(data, overwrite=True) - await (container.get_blob_client('blob2')).upload_blob(data, overwrite=True) - await (container.get_blob_client('blob3')).upload_blob(data, overwrite=True) + await container.get_blob_client("blob1").upload_blob(data, overwrite=True) + await container.get_blob_client("blob2").upload_blob(data, overwrite=True) + await container.get_blob_client("blob3").upload_blob(data, overwrite=True) # Act blob_pages = container.list_blob_names(results_per_page=2).by_page() @@ -2527,10 +2500,10 @@ async def test_list_blob_names_pagination(self, **kwargs): # Assert assert len(items_on_page1) == 2 - assert items_on_page1[0] == 'blob1' - assert items_on_page1[1] == 'blob2' + assert items_on_page1[0] == "blob1" + assert items_on_page1[1] == "blob2" assert len(items_on_page2) == 1 - assert items_on_page2[0] == 'blob3' + assert items_on_page2[0] == "blob3" @BlobPreparer() @recorded_by_proxy_async @@ -2542,7 +2515,7 @@ async def test_get_blob_client_with_properties_versioning(self, **kwargs): container: ContainerClient = await self._create_container(bsc) blob_name = self.get_resource_name("utcontainer") - blob_data = 'abc' + blob_data = "abc" blob_client = container.get_blob_client(blob_name) # Act @@ -2555,22 +2528,24 @@ async def test_get_blob_client_with_properties_versioning(self, **kwargs): await blob_client.upload_blob(blob_data * 4, overwrite=True) v4_props = await blob_client.get_blob_properties() - v1_blob_client = container.get_blob_client(blob=v1_props['name'], version_id=v1_props['version_id']) + v1_blob_client = container.get_blob_client(blob=v1_props["name"], version_id=v1_props["version_id"]) props1 = await v1_blob_client.get_blob_properties() - v2_blob_client = container.get_blob_client(blob=v1_props, version_id=v2_props['version_id']) + v2_blob_client = container.get_blob_client(blob=v1_props, version_id=v2_props["version_id"]) props2 = await v2_blob_client.get_blob_properties() - v3_blob_client = bsc.get_blob_client(container=container.container_name, blob=v2_props['name'], - version_id=v3_props['version_id']) + v3_blob_client = bsc.get_blob_client( + container=container.container_name, blob=v2_props["name"], version_id=v3_props["version_id"] + ) props3 = await v3_blob_client.get_blob_properties() - v4_blob_client = bsc.get_blob_client(container=container.container_name, blob=v3_props, - version_id=v4_props['version_id']) + v4_blob_client = 
bsc.get_blob_client( + container=container.container_name, blob=v3_props, version_id=v4_props["version_id"] + ) props4 = await v4_blob_client.get_blob_properties() # Assert - assert props1['version_id'] == v1_props['version_id'] - assert props2['version_id'] == v2_props['version_id'] - assert props3['version_id'] == v3_props['version_id'] - assert props4['version_id'] == v4_props['version_id'] + assert props1["version_id"] == v1_props["version_id"] + assert props2["version_id"] == v2_props["version_id"] + assert props3["version_id"] == v3_props["version_id"] + assert props4["version_id"] == v4_props["version_id"] @BlobPreparer() @recorded_by_proxy_async @@ -2579,16 +2554,18 @@ async def test_storage_account_audience_container_client(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - cc = ContainerClient(self.account_url(storage_account_name, "blob"), 'testcont', storage_account_key) + cc = ContainerClient(self.account_url(storage_account_name, "blob"), "testcont", storage_account_key) await cc.exists() # Act token_credential = self.get_credential(ContainerClient, is_async=True) cc = ContainerClient( - self.account_url(storage_account_name, "blob"), 'testcont', credential=token_credential, - audience=f'https://{storage_account_name}.blob.core.windows.net' + self.account_url(storage_account_name, "blob"), + "testcont", + credential=token_credential, + audience=f"https://{storage_account_name}.blob.core.windows.net", ) # Assert response = await cc.exists() - assert response is not None \ No newline at end of file + assert response is not None diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk.py b/sdk/storage/azure-storage-blob/tests/test_cpk.py index 9f7f2e9485b3..3e73b3354ff6 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk.py @@ -30,7 +30,7 @@ class TestStorageCPK(StorageRecordedTestCase): def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") # prep some test data so that they can be used in upload tests self.byte_data = self.get_random_bytes(10 * 1024) @@ -53,23 +53,19 @@ def _get_blob_reference(self): def _create_block_blob(self, bsc, blob_name=None, data=None, cpk=None, max_concurrency=1): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) - data = data if data else b'' + data = data if data else b"" resp = blob_client.upload_blob(data, cpk=cpk, max_concurrency=max_concurrency) return blob_client, resp def _create_append_blob(self, bsc, cpk=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) blob.create_append_blob(cpk=cpk) return blob def _create_page_blob(self, bsc, cpk=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) blob.create_page_blob(1024 * 1024, cpk=cpk) return blob @@ -89,22 +85,22 @@ def test_put_block_and_put_block_list(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client, _ = self._create_block_blob(bsc) - blob_client.stage_block('1', b'AAA', cpk=TEST_ENCRYPTION_KEY) - blob_client.stage_block('2', b'BBB', 
cpk=TEST_ENCRYPTION_KEY) - blob_client.stage_block('3', b'CCC', cpk=TEST_ENCRYPTION_KEY) + blob_client.stage_block("1", b"AAA", cpk=TEST_ENCRYPTION_KEY) + blob_client.stage_block("2", b"BBB", cpk=TEST_ENCRYPTION_KEY) + blob_client.stage_block("3", b"CCC", cpk=TEST_ENCRYPTION_KEY) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - put_block_list_resp = blob_client.commit_block_list(block_list, - cpk=TEST_ENCRYPTION_KEY) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + put_block_list_resp = blob_client.commit_block_list(block_list, cpk=TEST_ENCRYPTION_KEY) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] # assert put_block_list_resp['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -115,9 +111,9 @@ def test_put_block_and_put_block_list(self, **kwargs): blob = blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert blob.readall() == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -134,7 +130,8 @@ def test_create_block_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # Arrange # to force the in-memory chunks to be used @@ -142,13 +139,14 @@ def test_create_block_blob_with_chunks(self, **kwargs): # Act # create_blob_from_bytes forces the in-memory chunks to be used - blob_client, upload_response = self._create_block_blob(bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, - max_concurrency=2) + blob_client, upload_response = self._create_block_blob( + bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -160,8 +158,8 @@ def test_create_block_blob_with_chunks(self, **kwargs): # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -178,18 +176,20 @@ def test_create_block_blob_with_sub_streams(self, **kwargs): max_single_put_size=1024, 
min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # Act # create_blob_from_bytes forces the in-memory chunks to be used - blob_client, upload_response = self._create_block_blob(bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, - max_concurrency=2) + blob_client, upload_response = self._create_block_blob( + bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -201,8 +201,8 @@ def test_create_block_blob_with_sub_streams(self, **kwargs): # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -220,16 +220,17 @@ def test_create_block_blob_with_single_chunk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - data = b'AAABBBCCC' + data = b"AAABBBCCC" # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = self._create_block_blob(bsc, data=data, cpk=TEST_ENCRYPTION_KEY) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -241,8 +242,8 @@ def test_create_block_blob_with_single_chunk(self, **kwargs): # Assert content was retrieved with the cpk assert blob.readall() == data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -260,7 +261,8 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # create source blob and get source blob url source_blob_name = self.get_resource_name("sourceblob") @@ -274,7 +276,7 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -283,40 +285,43 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): destination_blob_client, _ = self._create_block_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act part 1: make put block from url calls - destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url, - source_offset=0, source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) - destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url, - source_offset=4 * 1024, source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) + destination_blob_client.stage_block_from_url( + block_id=1, source_url=source_blob_url, source_offset=0, source_length=4 * 1024, cpk=TEST_ENCRYPTION_KEY + ) + destination_blob_client.stage_block_from_url( + block_id=2, + source_url=source_blob_url, + source_offset=4 * 1024, + source_length=4 * 1024, + cpk=TEST_ENCRYPTION_KEY, + ) # Assert blocks - committed, uncommitted = destination_blob_client.get_block_list('all') + committed, uncommitted = destination_blob_client.get_block_list("all") assert len(uncommitted) == 2 assert len(committed) == 0 # commit the blocks without cpk should fail - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2")] with pytest.raises(HttpResponseError): destination_blob_client.commit_block_list(block_list) # Act commit the blocks with cpk should succeed - put_block_list_resp = destination_blob_client.commit_block_list(block_list, - cpk=TEST_ENCRYPTION_KEY) + put_block_list_resp = destination_blob_client.commit_block_list(block_list, cpk=TEST_ENCRYPTION_KEY) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] # assert put_block_list_resp['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content blob = destination_blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert blob.readall() == self.byte_data[0: 8 * 1024] - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == self.byte_data[0 : 8 * 1024] + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -334,18 +339,19 @@ def test_append_block(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - for content in [b'AAA', b'BBB', b'CCC']: + for content in [b"AAA", b"BBB", b"CCC"]: append_blob_prop = blob_client.append_block(content, cpk=TEST_ENCRYPTION_KEY) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the 
blob content without cpk should fail @@ -356,7 +362,7 @@ def test_append_block(self, **kwargs): blob = blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert blob.readall() == b'AAABBBCCC' + assert blob.readall() == b"AAABBBCCC" # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -373,7 +379,8 @@ def test_append_block_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -387,7 +394,7 @@ def test_append_block_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" + source_blob_sas @@ -395,15 +402,14 @@ def test_append_block_from_url(self, **kwargs): destination_blob_client = self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - append_blob_prop = destination_blob_client.append_block_from_url(source_blob_url, - source_offset=0, - source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) + append_blob_prop = destination_blob_client.append_block_from_url( + source_blob_url, source_offset=0, source_length=4 * 1024, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -414,7 +420,7 @@ def test_append_block_from_url(self, **kwargs): blob = destination_blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert blob.readall() == self.byte_data[0: 4 * 1024] + assert blob.readall() == self.byte_data[0 : 4 * 1024] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash self._teardown(bsc) @@ -432,18 +438,20 @@ def test_create_append_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - append_blob_prop = blob_client.upload_blob(self.byte_data, - blob_type=BlobType.AppendBlob, cpk=TEST_ENCRYPTION_KEY) + append_blob_prop = blob_client.upload_blob( + self.byte_data, blob_type=BlobType.AppendBlob, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -472,20 +480,20 @@ def test_update_page(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = 
self._create_page_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - page_blob_prop = blob_client.upload_page(self.byte_data, - offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = blob_client.upload_page( + self.byte_data, offset=0, length=len(self.byte_data), cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -493,9 +501,11 @@ def test_update_page(self, **kwargs): blob_client.download_blob() # Act get the blob content - blob = blob_client.download_blob(offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY, ) + blob = blob_client.download_blob( + offset=0, + length=len(self.byte_data), + cpk=TEST_ENCRYPTION_KEY, + ) # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data @@ -516,7 +526,8 @@ def test_update_page_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -530,7 +541,7 @@ def test_update_page_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -538,16 +549,14 @@ def test_update_page_from_url(self, **kwargs): blob_client = self._create_page_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - page_blob_prop = blob_client.upload_pages_from_url(source_blob_url, - offset=0, - length=len(self.byte_data), - source_offset=0, - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = blob_client.upload_pages_from_url( + source_blob_url, offset=0, length=len(self.byte_data), source_offset=0, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -555,9 +564,7 @@ def test_update_page_from_url(self, **kwargs): blob_client.download_blob() # Act get the blob content - blob = blob_client.download_blob(offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY) + blob = blob_client.download_blob(offset=0, length=len(self.byte_data), cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data @@ -578,18 +585,18 @@ def test_create_page_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - page_blob_prop = blob_client.upload_blob(self.byte_data, - blob_type=BlobType.PageBlob, - max_concurrency=2, - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = blob_client.upload_blob( + self.byte_data, blob_type=BlobType.PageBlob, max_concurrency=2, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -620,9 +627,10 @@ def test_get_set_blob_metadata(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY) + blob_client, _ = self._create_block_blob(bsc, data=b"AAABBBCCC", cpk=TEST_ENCRYPTION_KEY) # Act without the encryption key should fail with pytest.raises(HttpResponseError): @@ -636,7 +644,7 @@ def test_get_set_blob_metadata(self, **kwargs): # assert blob_props.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash # Act set blob properties - metadata = {'hello': 'world', 'number': '42', 'up': 'upval'} + metadata = {"hello": "world", "number": "42", "up": "upval"} with pytest.raises(HttpResponseError): blob_client.set_blob_metadata( metadata=metadata, @@ -648,10 +656,10 @@ def test_get_set_blob_metadata(self, **kwargs): blob_props = blob_client.get_blob_properties(cpk=TEST_ENCRYPTION_KEY) md = blob_props.metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['up'] == 'upval' - assert not 'Up' in md + assert md["hello"] == "world" + 
assert md["number"] == "42" + assert md["up"] == "upval" + assert not "Up" in md self._teardown(bsc) @BlobPreparer() @@ -670,9 +678,10 @@ def test_snapshot_blob(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY) + blob_client, _ = self._create_block_blob(bsc, data=b"AAABBBCCC", cpk=TEST_ENCRYPTION_KEY) # Act without cpk should not work with pytest.raises(HttpResponseError): diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_async.py b/sdk/storage/azure-storage-blob/tests/test_cpk_async.py index 14610b6beb25..cfa5c28e7e24 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_async.py @@ -32,7 +32,7 @@ class TestStorageCPKAsync(AsyncStorageRecordedTestCase): async def _setup(self, bsc): self.config = bsc._config self.byte_data = self.get_random_bytes(10 * 1024) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: await bsc.create_container(self.container_name) @@ -52,23 +52,19 @@ def _get_blob_reference(self): async def _create_block_blob(self, bsc, blob_name=None, data=None, cpk=None, max_concurrency=1): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) - data = data if data else b'' + data = data if data else b"" resp = await blob_client.upload_blob(data, cpk=cpk, max_concurrency=max_concurrency) return blob_client, resp async def _create_append_blob(self, bsc, cpk=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) await blob.create_append_blob(cpk=cpk) return blob async def _create_page_blob(self, bsc, cpk=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) await blob.create_page_blob(1024 * 1024, cpk=cpk) return blob @@ -87,23 +83,23 @@ async def test_put_block_and_put_block_list(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") blob_client, _ = await self._create_block_blob(bsc) - await blob_client.stage_block('1', b'AAA', cpk=TEST_ENCRYPTION_KEY) - await blob_client.stage_block('2', b'BBB', cpk=TEST_ENCRYPTION_KEY) - await blob_client.stage_block('3', b'CCC', cpk=TEST_ENCRYPTION_KEY) + await blob_client.stage_block("1", b"AAA", cpk=TEST_ENCRYPTION_KEY) + await blob_client.stage_block("2", b"BBB", cpk=TEST_ENCRYPTION_KEY) + await blob_client.stage_block("3", b"CCC", cpk=TEST_ENCRYPTION_KEY) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - put_block_list_resp = await blob_client.commit_block_list(block_list, - cpk=TEST_ENCRYPTION_KEY) + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] + put_block_list_resp = await blob_client.commit_block_list(block_list, cpk=TEST_ENCRYPTION_KEY) # Assert - assert put_block_list_resp['etag'] is not None - assert 
put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] # assert put_block_list_resp['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -114,9 +110,9 @@ async def test_put_block_and_put_block_list(self, **kwargs): blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert await blob.readall() == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert await blob.readall() == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @pytest.mark.live_test_only @@ -132,20 +128,22 @@ async def test_create_block_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # to force the in-memory chunks to be used self.config.use_byte_buffer = True # Act # create_blob_from_bytes forces the in-memory chunks to be used - blob_client, upload_response = await self._create_block_blob(bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, - max_concurrency=2) + blob_client, upload_response = await self._create_block_blob( + bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -157,8 +155,8 @@ async def test_create_block_blob_with_chunks(self, **kwargs): # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @pytest.mark.live_test_only @@ -176,18 +174,20 @@ async def test_create_block_blob_with_sub_streams(self, **kwargs): min_large_block_upload_threshold=1024, max_block_size=1024, max_page_size=1024, - retry_total=0) + retry_total=0, + ) await self._setup(bsc) # to force the in-memory chunks to be used self.config.use_byte_buffer = True - blob_client, upload_response = await self._create_block_blob(bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, - max_concurrency=2) + blob_client, upload_response = await self._create_block_blob( + bsc, data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert 
upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -199,8 +199,8 @@ async def test_create_block_blob_with_sub_streams(self, **kwargs): # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -216,16 +216,17 @@ async def test_create_block_blob_with_single_chunk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - data = b'AAABBBCCC' + data = b"AAABBBCCC" # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = await self._create_block_blob(bsc, data=data, cpk=TEST_ENCRYPTION_KEY) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] # assert upload_response['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -237,8 +238,8 @@ async def test_create_block_blob_with_single_chunk(self, **kwargs): # Assert content was retrieved with the cpk assert await blob.readall() == data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -254,7 +255,8 @@ async def test_put_block_from_url_and_commit(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # create source blob and get source blob url @@ -269,7 +271,7 @@ async def test_put_block_from_url_and_commit(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -278,40 +280,43 @@ async def test_put_block_from_url_and_commit(self, **kwargs): destination_blob_client, _ = await self._create_block_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act part 1: make put block from url calls - await destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url, - source_offset=0, source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) - await destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url, - source_offset=4 * 1024, source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) + await destination_blob_client.stage_block_from_url( + block_id=1, source_url=source_blob_url, source_offset=0, source_length=4 * 1024, cpk=TEST_ENCRYPTION_KEY + ) + await destination_blob_client.stage_block_from_url( + block_id=2, + source_url=source_blob_url, + source_offset=4 * 1024, + source_length=4 * 1024, + cpk=TEST_ENCRYPTION_KEY, + ) # Assert blocks - committed, uncommitted = await destination_blob_client.get_block_list('all') + committed, uncommitted = await destination_blob_client.get_block_list("all") assert len(uncommitted) == 2 assert len(committed) == 0 # commit the blocks without cpk should fail - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2")] with pytest.raises(HttpResponseError): await destination_blob_client.commit_block_list(block_list) # Act commit the blocks with cpk should succeed - put_block_list_resp = await destination_blob_client.commit_block_list(block_list, - cpk=TEST_ENCRYPTION_KEY) + put_block_list_resp = await destination_blob_client.commit_block_list(block_list, cpk=TEST_ENCRYPTION_KEY) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] # assert put_block_list_resp['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content blob = await destination_blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert await blob.readall() == self.byte_data[0: 8 * 1024] - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert await blob.readall() == self.byte_data[0 : 8 * 1024] + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -327,18 +332,19 @@ async def test_append_block(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - for content in [b'AAA', b'BBB', b'CCC']: + for content in [b"AAA", b"BBB", b"CCC"]: append_blob_prop = await blob_client.append_block(content, cpk=TEST_ENCRYPTION_KEY) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # 
assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -349,7 +355,7 @@ async def test_append_block(self, **kwargs): blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert await blob.readall() == b'AAABBBCCC' + assert await blob.readall() == b"AAABBBCCC" # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -365,7 +371,8 @@ async def test_append_block_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -379,7 +386,7 @@ async def test_append_block_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" + source_blob_sas @@ -387,15 +394,14 @@ async def test_append_block_from_url(self, **kwargs): destination_blob_client = await self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - append_blob_prop = await destination_blob_client.append_block_from_url(source_blob_url, - source_offset=0, - source_length=4 * 1024, - cpk=TEST_ENCRYPTION_KEY) + append_blob_prop = await destination_blob_client.append_block_from_url( + source_blob_url, source_offset=0, source_length=4 * 1024, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -406,7 +412,7 @@ async def test_append_block_from_url(self, **kwargs): blob = await destination_blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk - assert await blob.readall() == self.byte_data[0: 4 * 1024] + assert await blob.readall() == self.byte_data[0 : 4 * 1024] # assert blob.properties.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash @BlobPreparer() @@ -422,18 +428,20 @@ async def test_create_append_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_append_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - append_blob_prop = await blob_client.upload_blob(self.byte_data, - blob_type=BlobType.AppendBlob, cpk=TEST_ENCRYPTION_KEY) + append_blob_prop = await blob_client.upload_blob( + self.byte_data, blob_type=BlobType.AppendBlob, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] # assert append_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -460,20 
+468,20 @@ async def test_update_page(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_page_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - page_blob_prop = await blob_client.upload_page(self.byte_data, - offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = await blob_client.upload_page( + self.byte_data, offset=0, length=len(self.byte_data), cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -481,9 +489,11 @@ async def test_update_page(self, **kwargs): await blob_client.download_blob() # Act get the blob content - blob = await blob_client.download_blob(offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY, ) + blob = await blob_client.download_blob( + offset=0, + length=len(self.byte_data), + cpk=TEST_ENCRYPTION_KEY, + ) # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data @@ -502,7 +512,8 @@ async def test_update_page_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -516,7 +527,7 @@ async def test_update_page_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -524,16 +535,14 @@ async def test_update_page_from_url(self, **kwargs): blob_client = await self._create_page_blob(bsc, cpk=TEST_ENCRYPTION_KEY) # Act - page_blob_prop = await blob_client.upload_pages_from_url(source_blob_url, - offset=0, - length=len(self.byte_data), - source_offset=0, - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = await blob_client.upload_pages_from_url( + source_blob_url, offset=0, length=len(self.byte_data), source_offset=0, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -541,9 +550,11 @@ async def test_update_page_from_url(self, **kwargs): await blob_client.download_blob() # Act get the blob content - blob = await blob_client.download_blob(offset=0, - length=len(self.byte_data), - cpk=TEST_ENCRYPTION_KEY, ) + blob = await blob_client.download_blob( + offset=0, + length=len(self.byte_data), + cpk=TEST_ENCRYPTION_KEY, + ) # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data @@ -561,20 +572,20 @@ async def test_create_page_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # Act blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - page_blob_prop = await blob_client.upload_blob(self.byte_data, - blob_type=BlobType.PageBlob, - max_concurrency=2, - cpk=TEST_ENCRYPTION_KEY) + page_blob_prop = await blob_client.upload_blob( + self.byte_data, blob_type=BlobType.PageBlob, max_concurrency=2, cpk=TEST_ENCRYPTION_KEY + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] # assert page_blob_prop['encryption_key_sha256'] == TEST_ENCRYPTION_KEY.key_hash # Act get the blob content without cpk should fail @@ -601,9 +612,10 @@ async def test_get_set_blob_metadata(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY) + blob_client, _ = await self._create_block_blob(bsc, data=b"AAABBBCCC", cpk=TEST_ENCRYPTION_KEY) # Act without the encryption key should fail with pytest.raises(HttpResponseError): @@ -617,7 +629,7 @@ async def test_get_set_blob_metadata(self, **kwargs): # assert blob_props.encryption_key_sha256 == TEST_ENCRYPTION_KEY.key_hash # Act set blob properties - metadata = {'hello': 'world', 'number': '42', 'up': 'upval'} + metadata = {"hello": "world", "number": "42", "up": "upval"} with pytest.raises(HttpResponseError): await blob_client.set_blob_metadata( metadata=metadata, @@ -629,10 +641,10 @@ async def test_get_set_blob_metadata(self, **kwargs): blob_props = await blob_client.get_blob_properties(cpk=TEST_ENCRYPTION_KEY) md = blob_props.metadata assert 3 == len(md) - 
assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['up'] == 'upval' - assert not 'Up' in md + assert md["hello"] == "world" + assert md["number"] == "42" + assert md["up"] == "upval" + assert not "Up" in md @BlobPreparer() @recorded_by_proxy_async @@ -647,9 +659,10 @@ async def test_snapshot_blob(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY) + blob_client, _ = await self._create_block_blob(bsc, data=b"AAABBBCCC", cpk=TEST_ENCRYPTION_KEY) # Act without cpk should not work with pytest.raises(HttpResponseError): diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_n.py b/sdk/storage/azure-storage-blob/tests/test_cpk_n.py index 022b19707487..1bddf2cc17b7 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_n.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_n.py @@ -33,15 +33,15 @@ TEST_ENCRYPTION_SCOPE_2 = "testscope2" TEST_CONTAINER_ENCRYPTION_SCOPE = ContainerEncryptionScope(default_encryption_scope="testscope1") TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE = ContainerEncryptionScope( - default_encryption_scope="testscope1", - prevent_encryption_scope_override=True) + default_encryption_scope="testscope1", prevent_encryption_scope_override=True +) # ------------------------------------------------------------------------------ class TestStorageCPKN(StorageRecordedTestCase): def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") # prep some test data so that they can be used in upload tests self.byte_data = self.get_random_bytes(10 * 1024) @@ -64,26 +64,26 @@ def _teardown(self, bsc): def _get_blob_reference(self): return self.get_resource_name("cpk") - def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False): + def _create_block_blob( + self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False + ): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) - data = data if data else b'' - resp = blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite) + data = data if data else b"" + resp = blob_client.upload_blob( + data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite + ) return blob_client, resp def _create_append_blob(self, bsc, encryption_scope=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) blob.create_append_blob(encryption_scope=encryption_scope) return blob def _create_page_blob(self, bsc, encryption_scope=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) blob.create_page_blob(1024 * 1024, encryption_scope=encryption_scope) return blob @@ -102,30 +102,31 @@ def test_put_block_and_put_block_list(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client, _ = 
self._create_block_blob(bsc) - blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE) - blob_client.stage_block('2', b'BBB', encryption_scope=TEST_ENCRYPTION_SCOPE) - blob_client.stage_block('3', b'CCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client.stage_block("2", b"BBB", encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client.stage_block("3", b"CCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -142,7 +143,8 @@ def test_put_block_and_put_block_list_with_blob_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_name = self._get_blob_reference() @@ -156,30 +158,31 @@ def test_put_block_and_put_block_list_with_blob_sas(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE, ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) + blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1).get_blob_client( + self.container_name, blob_name + ) - blob_client.stage_block('1', b'AAA') - blob_client.stage_block('2', b'BBB') - blob_client.stage_block('3', b'CCC') + blob_client.stage_block("1", b"AAA") + blob_client.stage_block("2", b"BBB") + blob_client.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = blob_client.commit_block_list(block_list) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act 
get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -196,7 +199,8 @@ def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_name = self._get_blob_reference() @@ -210,18 +214,19 @@ def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE, ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ .get_blob_client(self.container_name, blob_name) + blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1).get_blob_client( + self.container_name, blob_name + ) # if ses in the SAS token and encryption_scope are both set but have DIFFERENT values, an exception is thrown with pytest.raises(HttpResponseError): - blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE_2) + blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE_2) # if ses in the SAS token and encryption_scope are both set and have the SAME value, the call succeeds - blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - block_list = [BlobBlock(block_id='1')] + block_list = [BlobBlock(block_id="1")] # if ses in the SAS token and encryption_scope are both set but have DIFFERENT values, an exception is thrown with pytest.raises(HttpResponseError): blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE_2) @@ -230,10 +235,10 @@ def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): put_block_list_resp = blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # generate a sas with a different encryption scope token2 = self.generate_sas( @@ -246,8 +251,9 @@ def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) - blob_client_diff_encryption_scope_sas = BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\ .get_blob_client(self.container_name, blob_name) + blob_client_diff_encryption_scope_sas = BlobServiceClient( + self.account_url(storage_account_name, "blob"), token2 + ).get_blob_client(self.container_name, blob_name) # blob can be downloaded successfully no matter which encryption scope is used on the blob actually # the
encryption scope on blob is TEST_ENCRYPTION_SCOPE and ses is TEST_ENCRYPTION_SCOPE_2 in SAS token, @@ -255,9 +261,9 @@ def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): blob = blob_client_diff_encryption_scope_sas.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == b'AAA' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == b"AAA" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -274,7 +280,8 @@ def test_create_block_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # to force the in-memory chunks to be used self.config.use_byte_buffer = True @@ -282,24 +289,22 @@ def test_create_block_blob_with_chunks(self, **kwargs): # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = self._create_block_blob( - bsc, - data=self.byte_data, - encryption_scope=TEST_ENCRYPTION_SCOPE, - max_concurrency=2) + bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_SCOPE, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] self._teardown(bsc) @pytest.mark.live_test_only @@ -315,30 +320,29 @@ def test_create_block_blob_with_sub_streams(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = self._create_block_blob( - bsc, - data=self.byte_data, - encryption_scope=TEST_ENCRYPTION_SCOPE, - max_concurrency=2) + bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_SCOPE, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert 
blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] self._teardown(bsc) @BlobPreparer() @@ -354,27 +358,28 @@ def test_create_block_blob_with_single_chunk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - data = b'AAABBBCCC' + data = b"AAABBBCCC" # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = self._create_block_blob(bsc, data=data, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk assert blob.readall() == data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] self._teardown(bsc) @BlobPreparer() @@ -390,7 +395,8 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) # create source blob and get source blob url @@ -405,7 +411,7 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -414,39 +420,48 @@ def test_put_block_from_url_and_commit_with_cpk(self, **kwargs): destination_blob_client, _ = self._create_block_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act part 1: make put block from url calls - destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url, - source_offset=0, source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) - destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url, - source_offset=4 * 1024, source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) + destination_blob_client.stage_block_from_url( + block_id=1, + source_url=source_blob_url, + source_offset=0, + source_length=4 * 1024, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) + destination_blob_client.stage_block_from_url( + block_id=2, + source_url=source_blob_url, + source_offset=4 * 1024, + source_length=4 * 1024, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) # Assert blocks - committed, uncommitted = destination_blob_client.get_block_list('all') + committed, uncommitted = destination_blob_client.get_block_list("all") assert len(uncommitted) == 2 assert len(committed) == 0 # commit the blocks without cpk should fail - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2")] with pytest.raises(HttpResponseError): destination_blob_client.commit_block_list(block_list) # Act commit the blocks with cpk should succeed - put_block_list_resp = destination_blob_client.commit_block_list(block_list, - encryption_scope=TEST_ENCRYPTION_SCOPE) + put_block_list_resp = destination_blob_client.commit_block_list( + block_list, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] # Act get the blob content blob = destination_blob_client.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == self.byte_data[0: 8 * 1024] - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert blob.readall() == self.byte_data[0 : 8 * 1024] + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] self._teardown(bsc) @BlobPreparer() @@ -462,25 +477,26 @@ def test_append_block(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - for content in [b'AAA', b'BBB', b'CCC']: + for content in [b"AAA", b"BBB", b"CCC"]: append_blob_prop = blob_client.append_block(content, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == 
TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == b'AAABBBCCC' + assert blob.readall() == b"AAABBBCCC" self._teardown(bsc) @BlobPreparer() @@ -496,7 +512,8 @@ def test_append_block_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") self.config.use_byte_buffer = True # chunk upload @@ -509,7 +526,7 @@ def test_append_block_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" + source_blob_sas @@ -517,22 +534,21 @@ def test_append_block_from_url(self, **kwargs): destination_blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - append_blob_prop = destination_blob_client.append_block_from_url(source_blob_url, - source_offset=0, - source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) + append_blob_prop = destination_blob_client.append_block_from_url( + source_blob_url, source_offset=0, source_length=4 * 1024, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = destination_blob_client.download_blob() # Assert content was retrieved with the cpk - assert blob.readall() == self.byte_data[0: 4 * 1024] + assert blob.readall() == self.byte_data[0 : 4 * 1024] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -549,19 +565,21 @@ def test_create_append_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - append_blob_prop = blob_client.upload_blob(self.byte_data, - blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE) + append_blob_prop = blob_client.upload_blob( + self.byte_data, blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() @@ -584,25 +602,24 @@ def test_update_page(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) 
blob_client = self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - page_blob_prop = blob_client.upload_page(self.byte_data, - offset=0, - length=len(self.byte_data), - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = blob_client.upload_page( + self.byte_data, offset=0, length=len(self.byte_data), encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content - blob = blob_client.download_blob(offset=0, - length=len(self.byte_data)) + blob = blob_client.download_blob(offset=0, length=len(self.byte_data)) # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data @@ -622,7 +639,8 @@ def test_update_page_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") self.config.use_byte_buffer = True # Make sure using chunk upload, then we can record the request @@ -635,7 +653,7 @@ def test_update_page_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -643,21 +661,22 @@ def test_update_page_from_url(self, **kwargs): blob_client = self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - page_blob_prop = blob_client.upload_pages_from_url(source_blob_url, - offset=0, - length=len(self.byte_data), - source_offset=0, - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = blob_client.upload_pages_from_url( + source_blob_url, + offset=0, + length=len(self.byte_data), + source_offset=0, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content - blob = blob_client.download_blob(offset=0, - length=len(self.byte_data)) + blob = blob_client.download_blob(offset=0, length=len(self.byte_data)) # Assert content was retrieved with the cpk assert blob.readall() == self.byte_data @@ -679,19 +698,19 @@ def test_create_page_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - page_blob_prop = blob_client.upload_blob(self.byte_data, - blob_type=BlobType.PageBlob, - max_concurrency=2, - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = blob_client.upload_blob( + self.byte_data, blob_type=BlobType.PageBlob, max_concurrency=2, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = blob_client.download_blob() @@ -714,19 +733,20 @@ def test_get_set_blob_metadata(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client, _ = self._create_block_blob(bsc, data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act blob_props = blob_client.get_blob_properties() # Assert assert blob_props.server_encrypted - assert blob_props['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert blob_props["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act set blob properties - metadata = {'hello': 'world', 'number': '42', 'up': 'upval'} + metadata = {"hello": "world", "number": "42", "up": "upval"} with pytest.raises(HttpResponseError): blob_client.set_blob_metadata( metadata=metadata, @@ -738,10 +758,10 @@ def test_get_set_blob_metadata(self, **kwargs): blob_props = blob_client.get_blob_properties() md = blob_props.metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['up'] == 'upval' - assert not 'Up' in md + assert 
md["hello"] == "world" + assert md["number"] == "42" + assert md["up"] == "upval" + assert not "Up" in md self._teardown(bsc) @BlobPreparer() @@ -757,9 +777,10 @@ def test_snapshot_blob(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client, _ = self._create_block_blob(bsc, data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act without cpk should not work with pytest.raises(HttpResponseError): @@ -785,9 +806,10 @@ def test_list_blobs(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) - self._create_block_blob(bsc, blob_name="blockblob", data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + self._create_block_blob(bsc, blob_name="blockblob", data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE) self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) container_client = bsc.get_container_client(self.container_name) @@ -813,7 +835,8 @@ def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc) token = self.generate_sas( @@ -823,7 +846,7 @@ def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): storage_account_key, permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -831,9 +854,10 @@ def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) # blob is encrypted using TEST_ENCRYPTION_SCOPE - self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True) self._create_append_blob(bsc_with_sas_credential) # generate a token with TEST_ENCRYPTION_SCOPE_2 @@ -844,7 +868,7 @@ def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): storage_account_key, permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE_2 + encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) bsc_with_diff_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -852,7 +876,8 @@ def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) container_client = bsc_with_diff_sas_credential.get_container_client(self.container_name) # The ses field in SAS token when list blobs is different from the encryption scope used on creating blob, while @@ -880,7 +905,7 @@ def test_copy_with_account_encryption_scope_sas(self, **kwargs): resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, 
write=True, delete=True, list=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE_2 + encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) bsc_with_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -888,11 +913,14 @@ def test_copy_with_account_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc_with_sas_credential) # blob is encrypted using TEST_ENCRYPTION_SCOPE_2 - blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + blob_client, _ = self._create_block_blob( + bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True + ) sas_token2 = self.generate_sas( generate_account_sas, @@ -901,7 +929,7 @@ def test_copy_with_account_encryption_scope_sas(self, **kwargs): resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_account_key_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -909,8 +937,9 @@ def test_copy_with_account_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') + max_page_size=1024, + ) + copied_blob = self.get_resource_name("copiedblob") copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob) # TODO: to confirm with Sean/Heidi ses in SAS cannot be set for async copy. 
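# ------------------------------------------------------------------------------
# A standalone sketch of the account-SAS-with-encryption-scope flow the tests
# above exercise: generate_account_sas stamps the scope into the token's "ses"
# field, so writes made through a client built from that token are encrypted
# under the scope. Account details, container/blob names, and the "myscope"
# scope are placeholders; the scope must already exist on the account.
from datetime import datetime, timedelta

from azure.storage.blob import (
    AccountSasPermissions,
    BlobServiceClient,
    ResourceTypes,
    generate_account_sas,
)

ACCOUNT_NAME = "<storage-account>"  # placeholder
ACCOUNT_KEY = "<account-key>"       # placeholder

sas = generate_account_sas(
    ACCOUNT_NAME,
    ACCOUNT_KEY,
    resource_types=ResourceTypes(object=True, container=True),
    permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
    encryption_scope="myscope",  # becomes the "ses" field of the SAS token
)

service = BlobServiceClient(f"https://{ACCOUNT_NAME}.blob.core.windows.net", credential=sas)
blob = service.get_blob_client("mycontainer", "myblob")  # placeholder names
resp = blob.upload_blob(b"data", overwrite=True)
assert resp["encryption_scope"] == "myscope"  # the service echoes the scope back
# ------------------------------------------------------------------------------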
@@ -944,10 +973,13 @@ def test_copy_blob_from_url_with_ecryption_scope(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) self._setup(bsc_with_sas_credential) - blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + blob_client, _ = self._create_block_blob( + bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True + ) bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -955,12 +987,14 @@ def test_copy_blob_from_url_with_ecryption_scope(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') + max_page_size=1024, + ) + copied_blob = self.get_resource_name("copiedblob") copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob) - copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True, - encryption_scope=TEST_ENCRYPTION_SCOPE) + copied_blob_client.start_copy_from_url( + blob_client.url, requires_sync=True, encryption_scope=TEST_ENCRYPTION_SCOPE + ) props = copied_blob_client.get_blob_properties() @@ -982,14 +1016,16 @@ def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) - user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), - datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service_client.get_user_delegation_key( + datetime.utcnow(), datetime.utcnow() + timedelta(hours=1) + ) self._setup(service_client) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") sas_token = self.generate_sas( generate_blob_sas, storage_account_name, @@ -998,7 +1034,7 @@ def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): account_key=user_delegation_key, permission=BlobSasPermissions(read=True, write=True, create=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_delegation_sas = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -1006,10 +1042,13 @@ def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) # blob is encrypted using TEST_ENCRYPTION_SCOPE - blob_client, _ = self._create_block_blob(bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True) + blob_client, _ = self._create_block_blob( + bsc_with_delegation_sas, blob_name=blob_name, data=b"AAABBBCCC", overwrite=True + ) props = blob_client.get_blob_properties() assert props.encryption_scope == TEST_ENCRYPTION_SCOPE @@ -1029,25 +1068,31 @@ def test_create_container_with_default_cpk_n(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) - container_client = bsc.create_container('cpkcontainer', - container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE) + max_page_size=1024, + ) + container_client = bsc.create_container( + "cpkcontainer", container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE + ) container_props = container_client.get_container_properties() - assert 
container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == False - for _ in bsc.list_containers(name_starts_with='cpkcontainer'): - assert container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + for _ in bsc.list_containers(name_starts_with="cpkcontainer"): + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == False blob_client = container_client.get_blob_client("appendblob") # providing encryption scope when upload the blob - resp = blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) + resp = blob_client.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) # Use the provided encryption scope on the blob - assert resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE_2 + assert resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE_2 container_client.delete_container() @@ -1064,31 +1109,36 @@ def test_create_container_with_default_cpk_n_deny_override(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) container_client = bsc.create_container( - 'denyoverridecpkcontainer', - container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE + "denyoverridecpkcontainer", container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE ) container_props = container_client.get_container_properties() - assert container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == True - for _ in bsc.list_containers(name_starts_with='denyoverridecpkcontainer'): - assert container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + for _ in bsc.list_containers(name_starts_with="denyoverridecpkcontainer"): + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == True blob_client = container_client.get_blob_client("appendblob") # It's not allowed to set encryption scope on the blob when the container denies encryption scope override. 
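# ------------------------------------------------------------------------------
# A minimal sketch of the deny-override contract described in the comment
# above, before the in-test assertion resumes below: a container created with
# prevent_encryption_scope_override=True rejects any per-blob scope that
# differs from its default, while requests that name no scope inherit it.
# Credentials, the container name, and "scope1"/"scope2" are placeholders.
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobServiceClient, BlobType, ContainerEncryptionScope

bsc = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")
locked = ContainerEncryptionScope(
    default_encryption_scope="scope1",
    prevent_encryption_scope_override=True,  # deny per-request scope overrides
)
container = bsc.create_container("lockedcontainer", container_encryption_scope=locked)
blob = container.get_blob_client("appendblob")
try:
    blob.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope="scope2")
except HttpResponseError:
    pass  # overriding the container default is rejected by the service
resp = blob.upload_blob(b"aaaa", BlobType.AppendBlob)  # no scope: inherit the default
assert resp["encryption_scope"] == "scope1"
container.delete_container()
# ------------------------------------------------------------------------------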
with pytest.raises(HttpResponseError): - blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) + blob_client.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) - resp = blob_client.upload_blob(b'aaaa', BlobType.AppendBlob) + resp = blob_client.upload_blob(b"aaaa", BlobType.AppendBlob) - assert resp['encryption_scope'] == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + assert resp["encryption_scope"] == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope container_client.delete_container() + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py b/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py index d2a26d6795df..326633409145 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py @@ -34,8 +34,8 @@ TEST_ENCRYPTION_SCOPE_2 = "testscope2" TEST_CONTAINER_ENCRYPTION_SCOPE = ContainerEncryptionScope(default_encryption_scope="testscope1") TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE = ContainerEncryptionScope( - default_encryption_scope="testscope1", - prevent_encryption_scope_override=True) + default_encryption_scope="testscope1", prevent_encryption_scope_override=True +) # ------------------------------------------------------------------------------ @@ -43,7 +43,7 @@ class TestStorageCPKAsync(AsyncStorageRecordedTestCase): async def _setup(self, bsc): self.config = bsc._config self.byte_data = self.get_random_bytes(10 * 1024) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: await bsc.create_container(self.container_name) @@ -63,26 +63,26 @@ def _teardown(self, bsc): def _get_blob_reference(self): return self.get_resource_name("cpk") - async def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False): + async def _create_block_blob( + self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False + ): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) - data = data if data else b'' - resp = await blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite) + data = data if data else b"" + resp = await blob_client.upload_blob( + data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite + ) return blob_client, resp async def _create_append_blob(self, bsc, encryption_scope=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) await blob.create_append_blob(encryption_scope=encryption_scope) return blob async def _create_page_blob(self, bsc, encryption_scope=None): blob_name = self._get_blob_reference() - blob = bsc.get_blob_client( - self.container_name, - blob_name) + blob = bsc.get_blob_client(self.container_name, blob_name) await blob.create_page_blob(1024 * 1024, encryption_scope=encryption_scope) return blob @@ -101,31 +101,32 @@ async def test_put_block_and_put_block_list(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) 
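# ------------------------------------------------------------------------------
# Every client in these tests pins the transfer-size knobs to 1 KiB so that the
# 10 KiB payload from get_random_bytes is forced through the chunked,
# multi-request upload and download paths rather than a single PUT. A sketch of
# that configuration with placeholder credentials:
from azure.storage.blob.aio import BlobServiceClient

bsc = BlobServiceClient(
    "https://<account>.blob.core.windows.net",  # placeholder account URL
    credential="<account-key>",                 # placeholder key
    max_single_put_size=1024,               # uploads beyond 1 KiB are split into staged blocks
    min_large_block_upload_threshold=1024,  # blocks beyond 1 KiB take the sub-stream path
    max_block_size=1024,                    # each staged block carries at most 1 KiB
    max_page_size=1024,                     # page-blob writes are capped at 1 KiB per request
)
# ------------------------------------------------------------------------------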
await self._setup(bsc) - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") blob_client, _ = await self._create_block_blob(bsc) - await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE) - await blob_client.stage_block('2', b'BBB', encryption_scope=TEST_ENCRYPTION_SCOPE) - await blob_client.stage_block('3', b'CCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + await blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE) + await blob_client.stage_block("2", b"BBB", encryption_scope=TEST_ENCRYPTION_SCOPE) + await blob_client.stage_block("3", b"CCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = await blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() # Assert content was retrieved with the cpk - assert await blob.readall() == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert await blob.readall() == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -142,7 +143,8 @@ async def test_put_block_and_put_block_list_with_blob_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_name = self._get_blob_reference() @@ -156,31 +158,32 @@ async def test_put_block_and_put_block_list_with_blob_sas(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE, ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) + blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1).get_blob_client( + self.container_name, blob_name + ) - await blob_client.stage_block('1', b'AAA') - await blob_client.stage_block('2', b'BBB') - await blob_client.stage_block('3', b'CCC') + await blob_client.stage_block("1", b"AAA") + await blob_client.stage_block("2", b"BBB") + await blob_client.stage_block("3", b"CCC") # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2"), BlobBlock(block_id="3")] put_block_list_resp = await blob_client.commit_block_list(block_list) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert 
put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() content = await blob.readall() # Assert content was retrieved with the cpk - assert content == b'AAABBBCCC' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert content == b"AAABBBCCC" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -197,7 +200,8 @@ async def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_name = self._get_blob_reference() @@ -211,18 +215,19 @@ async def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE, ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) + blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1).get_blob_client( + self.container_name, blob_name + ) # both ses in SAS and encryption_scopes are both set and have DIFFERENT values will throw exception with pytest.raises(HttpResponseError): - await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE_2) + await blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE_2) # both ses in SAS and encryption_scopes are both set and have SAME values will succeed - await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_SCOPE) + await blob_client.stage_block("1", b"AAA", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - block_list = [BlobBlock(block_id='1')] + block_list = [BlobBlock(block_id="1")] # both ses in SAS and encryption_scopes are both set and have DIFFERENT values will throw exception with pytest.raises(HttpResponseError): await blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE_2) @@ -231,10 +236,10 @@ async def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): put_block_list_resp = await blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # generate a sas with a different encryption scope token2 = self.generate_sas( @@ -247,8 +252,9 @@ async def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): expiry=datetime.utcnow() + timedelta(hours=1), encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) - blob_client_diff_encryption_scope_sas = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\ - .get_blob_client(self.container_name, blob_name) + blob_client_diff_encryption_scope_sas = BlobServiceClient( + self.account_url(storage_account_name, "blob"), token2 + ).get_blob_client(self.container_name, blob_name) # blob can be downloaded successfully no matter which encryption scope is used on the blob actually # the encryption scope on blob is TEST_ENCRYPTION_SCOPE and ses is TEST_ENCRYPTION_SCOPE_2 in SAS token, @@ -257,9 +263,9 @@ async def test_put_block_and_put_block_list_with_blob_sas_fails(self, **kwargs): content = await blob.readall() # Assert content was retrieved with the cpk - assert content == b'AAA' - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert content == b"AAA" + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE self._teardown(bsc) @@ -276,7 +282,8 @@ async def test_create_block_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # to force the in-memory chunks to be used self.config.use_byte_buffer = True @@ -284,24 +291,22 @@ async def test_create_block_blob_with_chunks(self, **kwargs): # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = await self._create_block_blob( - bsc, - data=self.byte_data, - encryption_scope=TEST_ENCRYPTION_SCOPE, - max_concurrency=2) + bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_SCOPE, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @pytest.mark.live_test_only @@ -319,30 +324,29 @@ async def test_create_block_blob_with_sub_streams(self, **kwargs): min_large_block_upload_threshold=1024, max_block_size=1024, max_page_size=1024, - retry_total=0) + retry_total=0, + ) await self._setup(bsc) # to force the in-memory chunks to be used self.config.use_byte_buffer = True blob_client, upload_response = await self._create_block_blob( - bsc, - data=self.byte_data, - encryption_scope=TEST_ENCRYPTION_SCOPE, - max_concurrency=2) + bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_SCOPE, max_concurrency=2 + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == 
TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() # Assert content was retrieved with the cpk assert await blob.readall() == self.byte_data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @BlobPreparer() @@ -358,25 +362,28 @@ async def test_create_block_blob_with_single_chunk(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - data = b'AAABBBCCC' + data = b"AAABBBCCC" # create_blob_from_bytes forces the in-memory chunks to be used - blob_client, upload_response = await self._create_block_blob(bsc, data=data, encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client, upload_response = await self._create_block_blob( + bsc, data=data, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert upload_response['etag'] is not None - assert upload_response['last_modified'] is not None - assert upload_response['request_server_encrypted'] - assert upload_response['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert upload_response["etag"] is not None + assert upload_response["last_modified"] is not None + assert upload_response["request_server_encrypted"] + assert upload_response["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() # Assert content was retrieved with the cpk assert await blob.readall() == data - assert blob.properties.etag == upload_response['etag'] - assert blob.properties.last_modified == upload_response['last_modified'] + assert blob.properties.etag == upload_response["etag"] + assert blob.properties.last_modified == upload_response["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @BlobPreparer() @@ -392,7 +399,8 @@ async def test_put_block_from_url_and_commit(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # create source blob and get source blob url @@ -407,7 +415,7 @@ async def test_put_block_from_url_and_commit(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -416,39 +424,49 @@ async def test_put_block_from_url_and_commit(self, **kwargs): destination_blob_client, _ = await self._create_block_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act part 1: make put block from url calls - await destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url, - source_offset=0, source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) - await destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url, - source_offset=4 * 1024, source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) + await destination_blob_client.stage_block_from_url( + block_id=1, + source_url=source_blob_url, + source_offset=0, + source_length=4 * 1024, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) + await destination_blob_client.stage_block_from_url( + block_id=2, + source_url=source_blob_url, + source_offset=4 * 1024, + source_length=4 * 1024, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) # Assert blocks - committed, uncommitted = await destination_blob_client.get_block_list('all') + committed, uncommitted = await destination_blob_client.get_block_list("all") assert len(uncommitted) == 2 assert len(committed) == 0 # commit the blocks without cpk should fail - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')] + block_list = [BlobBlock(block_id="1"), BlobBlock(block_id="2")] with pytest.raises(HttpResponseError): await destination_blob_client.commit_block_list(block_list) # Act commit the blocks with cpk should succeed - put_block_list_resp = await destination_blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_SCOPE) + put_block_list_resp = await destination_blob_client.commit_block_list( + block_list, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert put_block_list_resp['etag'] is not None - assert put_block_list_resp['last_modified'] is not None - assert put_block_list_resp['request_server_encrypted'] - assert put_block_list_resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert put_block_list_resp["etag"] is not None + assert put_block_list_resp["last_modified"] is not None + assert put_block_list_resp["request_server_encrypted"] + assert put_block_list_resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await destination_blob_client.download_blob() # Assert content was retrieved with the cpk - assert await blob.readall() == self.byte_data[0: 8 * 1024] - assert blob.properties.etag == put_block_list_resp['etag'] - assert blob.properties.last_modified == put_block_list_resp['last_modified'] + assert await blob.readall() == self.byte_data[0 : 8 * 1024] + assert blob.properties.etag == put_block_list_resp["etag"] + assert blob.properties.last_modified == put_block_list_resp["last_modified"] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @BlobPreparer() @@ -464,25 +482,26 @@ async def test_append_block(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - for content in [b'AAA', b'BBB', b'CCC']: + for content in [b"AAA", b"BBB", b"CCC"]: append_blob_prop = await blob_client.append_block(content, encryption_scope=TEST_ENCRYPTION_SCOPE) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert 
append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() # Assert content was retrieved with the cpk - assert await blob.readall() == b'AAABBBCCC' + assert await blob.readall() == b"AAABBBCCC" assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @BlobPreparer() @@ -498,7 +517,8 @@ async def test_append_block_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -512,7 +532,7 @@ async def test_append_block_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" + source_blob_sas @@ -520,22 +540,21 @@ async def test_append_block_from_url(self, **kwargs): destination_blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - append_blob_prop = await destination_blob_client.append_block_from_url(source_blob_url, - source_offset=0, - source_length=4 * 1024, - encryption_scope=TEST_ENCRYPTION_SCOPE) + append_blob_prop = await destination_blob_client.append_block_from_url( + source_blob_url, source_offset=0, source_length=4 * 1024, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await destination_blob_client.download_blob() # Assert content was retrieved with the cpk - assert await blob.readall() == self.byte_data[0: 4 * 1024] + assert await blob.readall() == self.byte_data[0 : 4 * 1024] assert blob.properties.encryption_scope == TEST_ENCRYPTION_SCOPE @BlobPreparer() @@ -551,19 +570,21 @@ async def test_create_append_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - append_blob_prop = await blob_client.upload_blob(self.byte_data, - blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE) + append_blob_prop = await blob_client.upload_blob( + self.byte_data, blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert append_blob_prop['etag'] is not None - assert append_blob_prop['last_modified'] is not None - assert append_blob_prop['request_server_encrypted'] - assert append_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert append_blob_prop["etag"] is not None + assert 
append_blob_prop["last_modified"] is not None + assert append_blob_prop["request_server_encrypted"] + assert append_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() @@ -585,21 +606,21 @@ async def test_update_page(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) blob_client = await self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - page_blob_prop = await blob_client.upload_page(self.byte_data, - offset=0, - length=len(self.byte_data), - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = await blob_client.upload_page( + self.byte_data, offset=0, length=len(self.byte_data), encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob(offset=0, length=len(self.byte_data)) @@ -621,7 +642,8 @@ async def test_update_page_from_url(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) source_blob_name = self.get_resource_name("sourceblob") @@ -635,7 +657,7 @@ async def test_update_page_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1) + expiry=datetime.utcnow() + timedelta(hours=1), ) source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas @@ -643,17 +665,19 @@ async def test_update_page_from_url(self, **kwargs): blob_client = await self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) # Act - page_blob_prop = await blob_client.upload_pages_from_url(source_blob_url, - offset=0, - length=len(self.byte_data), - source_offset=0, - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = await blob_client.upload_pages_from_url( + source_blob_url, + offset=0, + length=len(self.byte_data), + source_offset=0, + encryption_scope=TEST_ENCRYPTION_SCOPE, + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob(offset=0, length=len(self.byte_data)) @@ -674,21 +698,21 @@ async def test_create_page_blob_with_chunks(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) # Act blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference()) - page_blob_prop = await blob_client.upload_blob(self.byte_data, - blob_type=BlobType.PageBlob, - max_concurrency=2, - encryption_scope=TEST_ENCRYPTION_SCOPE) + page_blob_prop = await blob_client.upload_blob( + self.byte_data, blob_type=BlobType.PageBlob, max_concurrency=2, encryption_scope=TEST_ENCRYPTION_SCOPE + ) # Assert - assert page_blob_prop['etag'] is not None - assert page_blob_prop['last_modified'] is not None - assert page_blob_prop['request_server_encrypted'] - assert page_blob_prop['encryption_scope'] == TEST_ENCRYPTION_SCOPE + assert page_blob_prop["etag"] is not None + assert page_blob_prop["last_modified"] is not None + assert page_blob_prop["request_server_encrypted"] + assert page_blob_prop["encryption_scope"] == TEST_ENCRYPTION_SCOPE # Act get the blob content blob = await blob_client.download_blob() @@ -710,9 +734,10 @@ async def test_get_set_blob_metadata(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client, _ = await self._create_block_blob(bsc, data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act blob_props = await blob_client.get_blob_properties() @@ -722,7 +747,7 @@ async def test_get_set_blob_metadata(self, **kwargs): assert blob_props.encryption_scope == TEST_ENCRYPTION_SCOPE # Act set blob properties - metadata = {'hello': 'world', 'number': '42', 'up': 'upval'} + metadata = {"hello": "world", "number": "42", "up": "upval"} with pytest.raises(HttpResponseError): await blob_client.set_blob_metadata( metadata=metadata, @@ -734,10 +759,10 @@ async def test_get_set_blob_metadata(self, **kwargs): blob_props = await blob_client.get_blob_properties() md = blob_props.metadata assert 3 == len(md) - assert md['hello'] == 'world' - assert md['number'] == '42' - assert md['up'] == 'upval' - assert not 'Up' in md + assert md["hello"] == "world" + assert md["number"] == "42" + assert md["up"] == "upval" + assert not 
"Up" in md @BlobPreparer() @recorded_by_proxy_async @@ -752,9 +777,10 @@ async def test_snapshot_blob(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + blob_client, _ = await self._create_block_blob(bsc, data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE) # Act without cpk should not work with pytest.raises(HttpResponseError): @@ -779,9 +805,12 @@ async def test_list_blobs(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) - await self._create_block_blob(bsc, blob_name="blockblob", data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_SCOPE) + await self._create_block_blob( + bsc, blob_name="blockblob", data=b"AAABBBCCC", encryption_scope=TEST_ENCRYPTION_SCOPE + ) await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_SCOPE) container_client = bsc.get_container_client(self.container_name) @@ -807,7 +836,8 @@ async def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc) token = self.generate_sas( @@ -817,7 +847,7 @@ async def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): storage_account_key, permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -825,9 +855,10 @@ async def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) # blob is encrypted using TEST_ENCRYPTION_SCOPE - await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True) await self._create_append_blob(bsc_with_sas_credential) # generate a token with TEST_ENCRYPTION_SCOPE_2 @@ -838,7 +869,7 @@ async def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): storage_account_key, permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE_2 + encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) bsc_with_diff_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -846,7 +877,8 @@ async def test_list_blobs_using_container_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) container_client = bsc_with_diff_sas_credential.get_container_client(self.container_name) # The ses field in SAS token when list blobs is different from the encryption scope used on creating blob, while @@ -874,7 +906,7 @@ async def test_copy_with_account_encryption_scope_sas(self, **kwargs): resource_types=ResourceTypes(object=True, container=True), 
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE_2 + encryption_scope=TEST_ENCRYPTION_SCOPE_2, ) bsc_with_sas_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -882,11 +914,14 @@ async def test_copy_with_account_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc_with_sas_credential) # blob is encrypted using TEST_ENCRYPTION_SCOPE_2 - blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + blob_client, _ = await self._create_block_blob( + bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True + ) sas_token2 = self.generate_sas( generate_account_sas, @@ -895,7 +930,7 @@ async def test_copy_with_account_encryption_scope_sas(self, **kwargs): resource_types=ResourceTypes(object=True, container=True), permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_account_key_credential = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -903,8 +938,9 @@ async def test_copy_with_account_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') + max_page_size=1024, + ) + copied_blob = self.get_resource_name("copiedblob") copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob) # TODO: to confirm with Sean/Heidi ses in SAS cannot be set for async copy. 
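# ------------------------------------------------------------------------------
# As the TODO above notes, the tests pair a destination encryption scope with
# requires_sync=True, which completes the copy within a single request (see
# test_copy_blob_from_url_with_ecryption_scope below). A minimal async sketch;
# URLs, SAS tokens, and the "myscope" name are placeholders:
import asyncio

from azure.storage.blob.aio import BlobClient

async def copy_with_scope():
    async with BlobClient.from_blob_url(
        "https://<account>.blob.core.windows.net/mycontainer/copiedblob?<sas>"  # placeholder
    ) as dest:
        await dest.start_copy_from_url(
            "https://<account>.blob.core.windows.net/mycontainer/source?<sas>",  # placeholder
            requires_sync=True,  # complete the copy within this one request
            encryption_scope="myscope",
        )
        props = await dest.get_blob_properties()
        assert props.encryption_scope == "myscope"

asyncio.run(copy_with_scope())
# ------------------------------------------------------------------------------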
@@ -940,10 +976,13 @@ async def test_copy_blob_from_url_with_ecryption_scope(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) await self._setup(bsc_with_sas_credential) - blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) + blob_client, _ = await self._create_block_blob( + bsc_with_sas_credential, blob_name="blockblob", data=b"AAABBBCCC", overwrite=True + ) bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -951,12 +990,14 @@ async def test_copy_blob_from_url_with_ecryption_scope(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') + max_page_size=1024, + ) + copied_blob = self.get_resource_name("copiedblob") copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob) - await copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True, - encryption_scope=TEST_ENCRYPTION_SCOPE) + await copied_blob_client.start_copy_from_url( + blob_client.url, requires_sync=True, encryption_scope=TEST_ENCRYPTION_SCOPE + ) props = await copied_blob_client.get_blob_properties() @@ -978,14 +1019,16 @@ async def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) - user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(), - datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = await service_client.get_user_delegation_key( + datetime.utcnow(), datetime.utcnow() + timedelta(hours=1) + ) await self._setup(service_client) - blob_name = self.get_resource_name('blob') + blob_name = self.get_resource_name("blob") sas_token = self.generate_sas( generate_blob_sas, @@ -995,7 +1038,7 @@ async def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): account_key=user_delegation_key, permission=BlobSasPermissions(read=True, write=True, create=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_SCOPE + encryption_scope=TEST_ENCRYPTION_SCOPE, ) bsc_with_delegation_sas = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -1003,10 +1046,13 @@ async def test_copy_with_user_delegation_encryption_scope_sas(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) # blob is encrypted using TEST_ENCRYPTION_SCOPE - blob_client, _ = await self._create_block_blob(bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True) + blob_client, _ = await self._create_block_blob( + bsc_with_delegation_sas, blob_name=blob_name, data=b"AAABBBCCC", overwrite=True + ) props = await blob_client.get_blob_properties() assert props.encryption_scope == TEST_ENCRYPTION_SCOPE @@ -1026,27 +1072,32 @@ async def test_create_container_with_default_cpk_n(self, **kwargs): max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) container_client = await bsc.create_container( - 'asynccpkcontainer', - container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE) + "asynccpkcontainer", container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE + ) 
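# ------------------------------------------------------------------------------
# With prevent_encryption_scope_override left False (the default), the
# container's default scope applies only to requests that do not name a scope
# of their own; a per-request scope still wins, as the assertions below verify.
# A sketch under placeholder credentials and scope names:
import asyncio

from azure.storage.blob import BlobType, ContainerEncryptionScope
from azure.storage.blob.aio import BlobServiceClient

async def main():
    async with BlobServiceClient(
        "https://<account>.blob.core.windows.net", credential="<account-key>"  # placeholders
    ) as bsc:
        container = await bsc.create_container(
            "cpkcontainer",
            container_encryption_scope=ContainerEncryptionScope(default_encryption_scope="scope1"),
        )
        blob = container.get_blob_client("appendblob")
        # An explicit per-request scope overrides the container default.
        resp = await blob.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope="scope2")
        assert resp["encryption_scope"] == "scope2"
        await container.delete_container()

asyncio.run(main())
# ------------------------------------------------------------------------------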
container_props = await container_client.get_container_properties() - assert container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == False - async for container in bsc.list_containers(name_starts_with='asynccpkcontainer'): - assert container.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + async for container in bsc.list_containers(name_starts_with="asynccpkcontainer"): + assert ( + container.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == False blob_client = container_client.get_blob_client("appendblob") # providing encryption scope when upload the blob - resp = await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) + resp = await blob_client.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) # Use the provided encryption scope on the blob - assert resp['encryption_scope'] == TEST_ENCRYPTION_SCOPE_2 + assert resp["encryption_scope"] == TEST_ENCRYPTION_SCOPE_2 await container_client.delete_container() @@ -1063,29 +1114,33 @@ async def test_create_container_with_default_cpk_n_deny_override(self, **kwargs) max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, - max_page_size=1024) + max_page_size=1024, + ) container_client = await bsc.create_container( - 'asyncdenyoverridecpkcontainer', - container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE + "asyncdenyoverridecpkcontainer", container_encryption_scope=TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE ) container_props = await container_client.get_container_properties() - assert container_props.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + assert ( + container_props.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == True - async for container in bsc.list_containers(name_starts_with='asyncdenyoverridecpkcontainer'): - assert container.encryption_scope.default_encryption_scope == \ - TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + async for container in bsc.list_containers(name_starts_with="asyncdenyoverridecpkcontainer"): + assert ( + container.encryption_scope.default_encryption_scope + == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + ) assert container_props.encryption_scope.prevent_encryption_scope_override == True blob_client = container_client.get_blob_client("appendblob") # It's not allowed to set encryption scope on the blob when the container denies encryption scope override. 
with pytest.raises(HttpResponseError): - await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) + await blob_client.upload_blob(b"aaaa", BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_SCOPE_2) - resp = await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob) + resp = await blob_client.upload_blob(b"aaaa", BlobType.AppendBlob) - assert resp['encryption_scope'] == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope + assert resp["encryption_scope"] == TEST_CONTAINER_ENCRYPTION_SCOPE_DENY_OVERRIDE.default_encryption_scope await container_client.delete_container() diff --git a/sdk/storage/azure-storage-blob/tests/test_dictmixin.py b/sdk/storage/azure-storage-blob/tests/test_dictmixin.py index ceff65cd5d82..7ed510a5fb0b 100644 --- a/sdk/storage/azure-storage-blob/tests/test_dictmixin.py +++ b/sdk/storage/azure-storage-blob/tests/test_dictmixin.py @@ -61,7 +61,7 @@ def test_update(self): updated = {key: updatedval} model.update(updated) assert model[key] == updatedval - + def test_values_items(self): model = DictMixin() key = "testkey" diff --git a/sdk/storage/azure-storage-blob/tests/test_get_blob.py b/sdk/storage/azure-storage-blob/tests/test_get_blob.py index 721a68661181..a0e6add7b09c 100644 --- a/sdk/storage/azure-storage-blob/tests/test_get_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_get_blob.py @@ -19,7 +19,7 @@ from test_helpers import NonSeekableStream, ProgressTracker # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" # ------------------------------------------------------------------------------ @@ -32,9 +32,10 @@ def _setup(self, storage_account_name, key, upload_blob=True): self.account_url(storage_account_name, "blob"), credential=key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -43,7 +44,7 @@ def _setup(self, storage_account_name, key, upload_blob=True): except: pass - self.byte_blob = self.get_resource_name('byteblob') + self.byte_blob = self.get_resource_name("byteblob") self.byte_data = self.get_random_bytes(64 * 1024 + 5) if self.is_live and upload_blob: @@ -63,7 +64,7 @@ def test_unicode_get_blob_unicode_data(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - blob_data = u'hello world啊齄丂狛狜'.encode('utf-8') + blob_data = "hello world啊齄丂狛狜".encode("utf-8") blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(blob_data) @@ -82,7 +83,7 @@ def test_unicode_get_blob_binary_data(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - base64_data = 
'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==' + base64_data = "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==" binary_data = base64.b64decode(base64_data) blob_name = self._get_blob_reference() @@ -103,7 +104,7 @@ def test_get_blob_no_content(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - blob_data = b'' + blob_data = b"" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(blob_data) @@ -162,7 +163,7 @@ def test_ranged_get_blob_to_bytes_with_zero_byte(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - blob_data = b'' + blob_data = b"" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(blob_data) @@ 
-184,7 +185,7 @@ def test_ranged_get_blob_with_missing_start_range(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - blob_data = b'foobar' + blob_data = b"foobar" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(blob_data) @@ -207,7 +208,7 @@ def test_get_blob_to_bytes_snapshot(self, **kwargs): snapshot_ref = blob.create_snapshot() snapshot = self.bsc.get_blob_client(self.container_name, self.byte_blob, snapshot=snapshot_ref) - blob.upload_blob(self.byte_data, overwrite=True) # Modify the blob so the Etag no longer matches + blob.upload_blob(self.byte_data, overwrite=True) # Modify the blob so the Etag no longer matches # Act content = snapshot.download_blob(max_concurrency=2).readall() @@ -228,8 +229,8 @@ def test_get_blob_to_bytes_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -238,10 +239,8 @@ def callback(response): # Assert assert self.byte_data == content self.assert_download_progress( - len(self.byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -254,8 +253,8 @@ def test_get_blob_to_bytes_non_parallel(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -264,10 +263,8 @@ def callback(response): # Assert assert self.byte_data == content self.assert_download_progress( - len(self.byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -284,8 +281,8 @@ def test_get_blob_to_bytes_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -294,10 +291,8 @@ def callback(response): # Assert assert blob_data == content self.assert_download_progress( - len(blob_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -333,8 +328,8 @@ def test_get_blob_to_stream_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -346,7 +341,9 @@ def 
callback(response): temp_file.seek(0) actual = temp_file.read() assert self.byte_data == actual - self.assert_download_progress(len(self.byte_data),self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -359,8 +356,8 @@ def test_get_blob_to_stream_non_parallel(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -372,7 +369,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert self.byte_data == actual - self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -389,11 +388,10 @@ def test_get_blob_to_stream_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) - # Act with tempfile.TemporaryFile() as temp_file: downloader = blob.download_blob(raw_response_hook=callback, max_concurrency=2) @@ -403,7 +401,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert blob_data == actual - self.assert_download_progress(len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -440,8 +440,8 @@ def test_ranged_get_blob_to_path_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -449,18 +449,18 @@ def callback(response): end_range = self.config.max_single_get_size + 1024 with tempfile.TemporaryFile() as temp_file: downloader = blob.download_blob( - offset=start_range, - length=end_range, - raw_response_hook=callback, - max_concurrency=2) + offset=start_range, length=end_range, raw_response_hook=callback, max_concurrency=2 + ) read_bytes = downloader.readinto(temp_file) # Assert assert read_bytes == end_range temp_file.seek(0) actual = temp_file.read() - assert self.byte_data[start_range:end_range + start_range] == actual - self.assert_download_progress(end_range, self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + assert self.byte_data[start_range : end_range + start_range] == actual + self.assert_download_progress( + end_range, self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -565,13 +565,13 @@ def test_get_blob_to_text(self, **kwargs): # parallel tests introduce random 
order of requests, can only run live self._setup(storage_account_name, storage_account_key) - text_blob = self.get_resource_name('textblob') + text_blob = self.get_resource_name("textblob") text_data = self.get_random_text_data(self.config.max_single_get_size + 1) blob = self.bsc.get_blob_client(self.container_name, text_blob) blob.upload_blob(text_data) # Act - stream = blob.download_blob(max_concurrency=2, encoding='UTF-8') + stream = blob.download_blob(max_concurrency=2, encoding="UTF-8") content = stream.readall() # Assert @@ -586,7 +586,7 @@ def test_get_blob_to_text_with_progress(self, **kwargs): # parallel tests introduce random order of requests, can only run live self._setup(storage_account_name, storage_account_key) - text_blob = self.get_resource_name('textblob') + text_blob = self.get_resource_name("textblob") text_data = self.get_random_text_data(self.config.max_single_get_size + 1) blob = self.bsc.get_blob_client(self.container_name, text_blob) blob.upload_blob(text_data) @@ -594,24 +594,19 @@ def test_get_blob_to_text_with_progress(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = blob.download_blob( - raw_response_hook=callback, - max_concurrency=2, - encoding='UTF-8') + stream = blob.download_blob(raw_response_hook=callback, max_concurrency=2, encoding="UTF-8") content = stream.readall() # Assert assert text_data == content self.assert_download_progress( - len(text_data.encode('utf-8')), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text_data.encode("utf-8")), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -628,24 +623,19 @@ def test_get_blob_to_text_non_parallel(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = blob.download_blob( - raw_response_hook=callback, - max_concurrency=1, - encoding='UTF-8') + stream = blob.download_blob(raw_response_hook=callback, max_concurrency=1, encoding="UTF-8") content = stream.readall() # Assert assert text_data == content self.assert_download_progress( - len(text_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -662,21 +652,19 @@ def test_get_blob_to_text_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = blob.download_blob(raw_response_hook=callback, encoding='UTF-8') + stream = blob.download_blob(raw_response_hook=callback, encoding="UTF-8") content = stream.readall() # Assert assert blob_data == content self.assert_download_progress( - len(blob_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(blob_data), 
self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -685,13 +673,13 @@ def test_get_blob_to_text_with_encoding(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - text = u'hello 啊齄丂狛狜 world' + text = "hello 啊齄丂狛狜 world" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(text, encoding='utf-16') + blob.upload_blob(text, encoding="utf-16") # Act - stream = blob.download_blob(encoding='UTF-16') + stream = blob.download_blob(encoding="UTF-16") content = stream.readall() # Assert @@ -704,29 +692,27 @@ def test_get_blob_to_text_with_encoding_and_progress(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - text = u'hello 啊齄丂狛狜 world' + text = "hello 啊齄丂狛狜 world" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(text, encoding='utf-16') + blob.upload_blob(text, encoding="utf-16") # Act progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) - stream = blob.download_blob(raw_response_hook=callback, encoding='UTF-16') + stream = blob.download_blob(raw_response_hook=callback, encoding="UTF-16") content = stream.readall() # Assert assert text == content self.assert_download_progress( - len(text.encode('utf-8')), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text.encode("utf-8")), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -783,8 +769,8 @@ def test_get_blob_to_stream_exact_get_size(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -796,7 +782,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert byte_data == actual - self.assert_download_progress(len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -813,8 +801,8 @@ def test_get_blob_exact_get_size(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -823,10 +811,8 @@ def callback(response): # Assert assert byte_data == content self.assert_download_progress( - len(byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy @@ -836,17 +822,15 @@ def test_get_blob_exact_chunk_size(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob_name = 
self._get_blob_reference() - byte_data = self.get_random_bytes( - self.config.max_single_get_size + - self.config.max_chunk_get_size) + byte_data = self.get_random_bytes(self.config.max_single_get_size + self.config.max_chunk_get_size) blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(byte_data) progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -855,10 +839,8 @@ def callback(response): # Assert assert byte_data == content self.assert_download_progress( - len(byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -910,7 +892,7 @@ def test_get_blob_range_to_stream_with_overall_md5(self, **kwargs): self._setup(storage_account_name, storage_account_key) blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) props = blob.get_blob_properties() - props.content_settings.content_md5 = b'MDAwMDAwMDA=' + props.content_settings.content_md5 = b"MDAwMDAwMDA=" blob.set_http_headers(props.content_settings) # Act @@ -921,7 +903,7 @@ def test_get_blob_range_to_stream_with_overall_md5(self, **kwargs): # Assert assert downloader.size == 1024 assert read_bytes == 1024 - assert b'MDAwMDAwMDA=' == downloader.properties.content_settings.content_md5 + assert b"MDAwMDAwMDA=" == downloader.properties.content_settings.content_md5 @BlobPreparer() @recorded_by_proxy @@ -935,7 +917,7 @@ def test_get_blob_range_with_overall_md5(self, **kwargs): self._setup(storage_account_name, storage_account_key) props = blob.get_blob_properties() - props.content_settings.content_md5 = b'MDAwMDAwMDA=' + props.content_settings.content_md5 = b"MDAwMDAwMDA=" blob.set_http_headers(props.content_settings) # Act @@ -943,7 +925,7 @@ def test_get_blob_range_with_overall_md5(self, **kwargs): # Assert assert content.properties.size == 1024 - assert b'MDAwMDAwMDA=' == content.properties.content_settings.content_md5 + assert b"MDAwMDAwMDA=" == content.properties.content_settings.content_md5 @BlobPreparer() @recorded_by_proxy @@ -974,7 +956,7 @@ def test_get_blob_progress_single_get(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'a' * 512 + data = b"a" * 512 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -994,7 +976,7 @@ def test_get_blob_progress_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -1015,7 +997,7 @@ def test_get_blob_progress_chunked_parallel(self, **kwargs): # parallel tests introduce random order of requests, can only run live self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -1036,7 +1018,7 @@ def test_get_blob_progress_range(self, 
**kwargs): # parallel tests introduce random order of requests, can only run live self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -1046,10 +1028,7 @@ def test_get_blob_progress_range(self, **kwargs): # Act blob.download_blob( - offset=512, - length=length, - max_concurrency=3, - progress_hook=progress.assert_progress + offset=512, length=length, max_concurrency=3, progress_hook=progress.assert_progress ).readall() # Assert @@ -1063,7 +1042,7 @@ def test_get_blob_progress_readinto(self, **kwargs): # parallel tests introduce random order of requests, can only run live self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.upload_blob(data, overwrite=True) @@ -1086,7 +1065,7 @@ def test_get_blob_read_empty(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'' + data = b"" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1105,7 +1084,7 @@ def test_get_blob_read_all(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1125,7 +1104,7 @@ def test_get_blob_read_single(self, **kwargs): self.bsc._config.max_single_get_size = 10 * 1024 self.bsc._config.max_chunk_get_size = 10 * 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() @@ -1151,7 +1130,7 @@ def test_get_blob_read_small_chunks(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() @@ -1177,7 +1156,7 @@ def test_get_blob_read_large_chunks(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() @@ -1203,7 +1182,7 @@ def test_get_blob_read_chunk_equal_download_chunk(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() @@ -1230,7 +1209,7 @@ def test_get_blob_read_random_chunks(self, **kwargs): # Random chunk sizes, can only run live 
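# ---------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this diff): the read tests
# in this file all exercise the same pattern -- download_blob() returns a
# StorageStreamDownloader, and read(size) accepts arbitrary sizes that need
# not align with the client's max_chunk_get_size. A minimal standalone
# version of that pattern; account_url, credential, and the container/blob
# names below are placeholders, not values taken from this PR.
from azure.storage.blob import BlobServiceClient

account_url = "https://<account>.blob.core.windows.net"  # placeholder
credential = "<account-key>"                             # placeholder

bsc = BlobServiceClient(
    account_url,
    credential=credential,
    max_single_get_size=1024,  # small limits force the chunked download path
    max_chunk_get_size=1024,
)
stream = bsc.get_blob_client("utcontainer", "byteblob").download_blob()
result = bytearray()
while True:
    chunk = stream.read(512)  # any read size; a read may span download chunks
    if not chunk:             # b"" signals end of stream
        break
    result.extend(chunk)
# ---------------------------------------------------------------------------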
self._setup(storage_account_name, storage_account_key) - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() @@ -1255,7 +1234,7 @@ def test_get_blob_read_parallel(self, **kwargs): # parallel tests introduce random order of requests, can only run live self._setup(storage_account_name, storage_account_key) - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob(max_concurrency=3) @@ -1284,13 +1263,13 @@ def test_get_blob_into_upload(self, **kwargs): self.bsc._config.max_single_put_size = 1024 self.bsc._config.max_block_size = 1024 - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) stream = blob.download_blob() # Act - blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference() + '-copy') + blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference() + "-copy") blob2.upload_blob(stream, overwrite=True) result = blob2.download_blob().readall() @@ -1304,7 +1283,7 @@ def test_get_blob_read_past(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'Hello World' + data = b"Hello World" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1316,7 +1295,7 @@ def test_get_blob_read_past(self, **kwargs): assert result == data for _ in range(3): result = stream.read(100) - assert result == b'' + assert result == b"" @BlobPreparer() @recorded_by_proxy @@ -1325,7 +1304,7 @@ def test_get_blob_read_ranged(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1337,8 +1316,8 @@ def test_get_blob_read_ranged(self, **kwargs): data1 = stream.read(read_size) data2 = stream.read(read_size) - assert data1 == data[offset:offset + read_size] - assert data2 == data[offset + read_size:offset + length] + assert data1 == data[offset : offset + read_size] + assert data2 == data[offset + read_size : offset + length] offset, length = 501, 3000 stream = blob.download_blob(offset=offset, length=length) @@ -1347,8 +1326,8 @@ def test_get_blob_read_ranged(self, **kwargs): data1 = stream.read(read_size) data2 = stream.read(read_size) - assert data1 == data[offset:offset + read_size] - assert data2 == data[offset + read_size:offset + length] + assert data1 == data[offset : offset + read_size] + assert data2 == data[offset + read_size : offset + length] @BlobPreparer() @recorded_by_proxy @@ -1357,7 +1336,7 @@ def test_get_blob_read_with_other_read_operations_single(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key) - data = b'Hello World' + data = b"Hello World" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, 
overwrite=True) @@ -1387,7 +1366,7 @@ def test_get_blob_read_with_other_read_operations_single(self, **kwargs): second = second_stream.getvalue() assert first == data - assert second == b'' + assert second == b"" assert read_size == 0 @BlobPreparer() @@ -1397,7 +1376,7 @@ def test_get_blob_read_with_other_read_operations_chunks(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1433,7 +1412,7 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) offset, length = 1024, 2048 @@ -1443,16 +1422,16 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = stream.read(100) # Read in first chunk second = stream.readall() - assert first == data[offset:offset + 100] - assert second == data[offset + 100:offset + length] + assert first == data[offset : offset + 100] + assert second == data[offset + 100 : offset + length] offset, length = 501, 5000 stream = blob.download_blob(offset=offset, length=length) first = stream.read(3000) # Read past first chunk second = stream.readall() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] stream = blob.download_blob(offset=offset, length=length) first = stream.read(3000) # Read past first chunk @@ -1460,8 +1439,8 @@ def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_size = stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] assert read_size == len(second) @BlobPreparer() @@ -1471,7 +1450,7 @@ def test_get_blob_read_progress(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1501,7 +1480,7 @@ def test_get_blob_read_progress_chars(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 260 # 3120 bytes + data = "你好世界" * 260 # 3120 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, overwrite=True) @@ -1517,7 +1496,7 @@ def assert_progress(self, current, total): self.num_read += 1 progress = CustomProgressTracker() - stream = blob.download_blob(encoding='utf-8', progress_hook=progress.assert_progress) + stream = blob.download_blob(encoding="utf-8", progress_hook=progress.assert_progress) # Act / Assert for _ in range(4): @@ -1531,18 
+1510,18 @@ def test_get_blob_read_chars_single(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 5 + data = "你好世界" * 5 blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - blob.upload_blob(data, encoding='utf-8', overwrite=True) + blob.upload_blob(data, encoding="utf-8", overwrite=True) - stream = blob.download_blob(encoding='utf-8') + stream = blob.download_blob(encoding="utf-8") assert stream.read() == data - stream = blob.download_blob(encoding='utf-8') + stream = blob.download_blob(encoding="utf-8") assert stream.read(chars=100000) == data - result = '' - stream = blob.download_blob(encoding='utf-8') + result = "" + stream = blob.download_blob(encoding="utf-8") for _ in range(4): chunk = stream.read(chars=5) result += chunk @@ -1558,18 +1537,18 @@ def test_get_blob_read_chars_chunks(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 256 # 3 KiB + data = "你好世界" * 256 # 3 KiB blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - blob.upload_blob(data, encoding='utf-8', overwrite=True) + blob.upload_blob(data, encoding="utf-8", overwrite=True) - stream = blob.download_blob(encoding='utf-8') + stream = blob.download_blob(encoding="utf-8") assert stream.read() == data - stream = blob.download_blob(encoding='utf-8') + stream = blob.download_blob(encoding="utf-8") assert stream.read(chars=100000) == data - result = '' - stream = blob.download_blob(encoding='utf-8') + result = "" + stream = blob.download_blob(encoding="utf-8") for _ in range(4): chunk = stream.read(chars=100) result += chunk @@ -1585,21 +1564,21 @@ def test_get_blob_read_chars_ranged(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 256 # 3 KiB + data = "你好世界" * 256 # 3 KiB blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - blob.upload_blob(data, encoding='utf-8', overwrite=True) + blob.upload_blob(data, encoding="utf-8", overwrite=True) # Offset and length need to be multiple of 3 to meet unicode boundaries offset, length = 9, 1500 - expected = data[offset//3: offset//3 + length//3] - stream = blob.download_blob(offset=offset, length=length, encoding='utf-8') + expected = data[offset // 3 : offset // 3 + length // 3] + stream = blob.download_blob(offset=offset, length=length, encoding="utf-8") assert stream.read() == expected - stream = blob.download_blob(offset=offset, length=length, encoding='utf-8') + stream = blob.download_blob(offset=offset, length=length, encoding="utf-8") assert stream.read(chars=100000) == expected - result = '' - stream = blob.download_blob(offset=offset, length=length, encoding='utf-8') + result = "" + stream = blob.download_blob(offset=offset, length=length, encoding="utf-8") for _ in range(4): chunk = stream.read(chars=100) result += chunk @@ -1615,28 +1594,28 @@ def test_get_blob_read_chars_mixed(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 2 + data = "你好世界" * 2 blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - blob.upload_blob(data, encoding='utf-8', overwrite=True) + blob.upload_blob(data, encoding="utf-8", 
overwrite=True) - stream = blob.download_blob(encoding='utf-8') + stream = blob.download_blob(encoding="utf-8") # Read some data as chars, this should prevent any reading as bytes - assert stream.read(chars=4) == '你好世界' + assert stream.read(chars=4) == "你好世界" # readinto, chunks, and read(size=x) should now be blocked with pytest.raises(ValueError) as e: stream.readinto(BytesIO()) - assert 'Stream has been partially read in text mode.' in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) with pytest.raises(ValueError) as e: stream.chunks() - assert 'Stream has been partially read in text mode.' in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) with pytest.raises(ValueError) as e: stream.read(size=12) - assert 'Stream has been partially read in text mode.' in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) # read() should still work to get remaining chars - assert stream.read() == '你好世界' + assert stream.read() == "你好世界" @BlobPreparer() @recorded_by_proxy @@ -1645,15 +1624,15 @@ def test_get_blob_read_chars_utf32(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") self._setup(storage_account_name, storage_account_key, upload_blob=False) - data = '你好世界' * 256 - encoding = 'utf-32' + data = "你好世界" * 256 + encoding = "utf-32" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) blob.upload_blob(data, encoding=encoding, overwrite=True) stream = blob.download_blob(encoding=encoding) assert stream.read() == data - result = '' + result = "" stream = blob.download_blob(encoding=encoding) for _ in range(4): chunk = stream.read(chars=100) diff --git a/sdk/storage/azure-storage-blob/tests/test_get_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_get_blob_async.py index fb212f6c8acd..230e6646e474 100644 --- a/sdk/storage/azure-storage-blob/tests/test_get_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_get_blob_async.py @@ -21,7 +21,7 @@ from test_helpers_async import ProgressTracker, NonSeekableStream # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" # ------------------------------------------------------------------------------ @@ -32,10 +32,11 @@ async def _setup(self, storage_account_name, key, upload_blob=True): self.account_url(storage_account_name, "blob"), credential=key, max_single_get_size=32 * 1024, - max_chunk_get_size=4 * 1024) + max_chunk_get_size=4 * 1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') - self.byte_blob = self.get_resource_name('byteblob') + self.container_name = self.get_resource_name("utcontainer") + self.byte_blob = self.get_resource_name("byteblob") self.byte_data = self.get_random_bytes(64 * 1024 + 5) if self.is_live: container = self.bsc.get_container_client(self.container_name) @@ -60,7 +61,7 @@ async def test_unicode_get_blob_unicode_data(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - blob_data = u'hello world啊齄丂狛狜'.encode('utf-8') + blob_data = "hello world啊齄丂狛狜".encode("utf-8") blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(blob_data) @@ -80,7 +81,7 @@ async def test_unicode_get_blob_binary_data(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - base64_data = 
'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==' + base64_data = "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==" binary_data = base64.b64decode(base64_data) blob_name = self._get_blob_reference() @@ -102,7 +103,7 @@ async def test_get_blob_no_content(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - blob_data = b'' + blob_data = b"" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(blob_data) @@ -163,7 +164,7 @@ async def test_ranged_get_blob_to_bytes_with_zero_byte(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - blob_data = b'' + blob_data = b"" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(blob_data) @@ -185,7 
+186,7 @@ async def test_ranged_get_blob_with_missing_start_range(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - blob_data = b'foobar' + blob_data = b"foobar" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(blob_data) @@ -209,7 +210,7 @@ async def test_get_blob_to_bytes_snapshot(self, **kwargs): snapshot_ref = await blob.create_snapshot() snapshot = self.bsc.get_blob_client(self.container_name, self.byte_blob, snapshot=snapshot_ref) - await blob.upload_blob(self.byte_data, overwrite=True) # Modify the blob so the Etag no longer matches + await blob.upload_blob(self.byte_data, overwrite=True) # Modify the blob so the Etag no longer matches # Act content = await (await snapshot.download_blob(max_concurrency=2)).readall() @@ -231,8 +232,8 @@ async def test_get_blob_to_bytes_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -241,10 +242,8 @@ def callback(response): # Assert assert self.byte_data == content self.assert_download_progress( - len(self.byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -258,8 +257,8 @@ async def test_get_blob_to_bytes_non_parallel(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -268,10 +267,8 @@ def callback(response): # Assert assert self.byte_data == content self.assert_download_progress( - len(self.byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -289,8 +286,8 @@ async def test_get_blob_to_bytes_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -299,10 +296,8 @@ def callback(response): # Assert assert blob_data == content self.assert_download_progress( - len(blob_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -334,11 +329,11 @@ async def test_readinto_raises_exceptions(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # parallel tests introduce random order of requests, can only run live - callback_counter = {'value': 0} + callback_counter = {"value": 0} def callback(response): - callback_counter['value'] += 1 - if callback_counter['value'] > 
3: + callback_counter["value"] += 1 + if callback_counter["value"] > 3: raise ValueError() # Arrange @@ -365,8 +360,8 @@ async def test_get_blob_to_stream_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -378,7 +373,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert self.byte_data == actual - self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -392,8 +389,8 @@ async def test_get_blob_to_stream_non_parallel(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -406,7 +403,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert self.byte_data == actual - self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -424,11 +423,10 @@ async def test_get_blob_to_stream_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) - # Act with tempfile.TemporaryFile() as temp_file: downloader = await blob.download_blob(raw_response_hook=callback, max_concurrency=2) @@ -439,7 +437,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert blob_data == actual - self.assert_download_progress(len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -455,9 +455,9 @@ async def test_ranged_get_blob_to_path(self, **kwargs): # Act end_range = self.config.max_single_get_size - FILE_PATH = 'ranged_get_blob_to_path_async.temp.{}.dat'.format(str(uuid.uuid4())) + FILE_PATH = "ranged_get_blob_to_path_async.temp.{}.dat".format(str(uuid.uuid4())) with tempfile.TemporaryFile() as temp_file: - downloader = await blob.download_blob(offset=1, length=end_range-1, max_concurrency=2) + downloader = await blob.download_blob(offset=1, length=end_range - 1, max_concurrency=2) read_bytes = await downloader.readinto(temp_file) # Assert @@ -480,8 +480,8 @@ async def test_ranged_get_blob_to_path_with_progress(self, **kwargs): blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) def callback(response): - current = response.context['download_stream_current'] - total = 
response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -489,18 +489,18 @@ def callback(response): end_range = self.config.max_single_get_size + 1024 with tempfile.TemporaryFile() as temp_file: downloader = await blob.download_blob( - offset=start_range, - length=end_range, - raw_response_hook=callback, - max_concurrency=2) + offset=start_range, length=end_range, raw_response_hook=callback, max_concurrency=2 + ) read_bytes = await downloader.readinto(temp_file) # Assert assert read_bytes == self.config.max_single_get_size + 1024 temp_file.seek(0) actual = temp_file.read() - assert self.byte_data[start_range:end_range + start_range] == actual - self.assert_download_progress(end_range, self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + assert self.byte_data[start_range : end_range + start_range] == actual + self.assert_download_progress( + end_range, self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -561,7 +561,7 @@ async def test_ranged_get_blob_to_path_invalid_range_parallel(self, **kwargs): await blob.upload_blob(blob_data) # Act - FILE_PATH = 'path_invalid_range_parallel_async.temp.{}.dat'.format(str(uuid.uuid4())) + FILE_PATH = "path_invalid_range_parallel_async.temp.{}.dat".format(str(uuid.uuid4())) end_range = 2 * self.config.max_single_get_size with tempfile.TemporaryFile() as temp_file: downloader = await blob.download_blob(offset=1, length=end_range, max_concurrency=2) @@ -611,13 +611,13 @@ async def test_get_blob_to_text(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - text_blob = self.get_resource_name('textblob') + text_blob = self.get_resource_name("textblob") text_data = self.get_random_text_data(self.config.max_single_get_size + 1) blob = self.bsc.get_blob_client(self.container_name, text_blob) await blob.upload_blob(text_data) # Act - stream = await blob.download_blob(max_concurrency=2, encoding='UTF-8') + stream = await blob.download_blob(max_concurrency=2, encoding="UTF-8") content = await stream.readall() # Assert @@ -633,7 +633,7 @@ async def test_get_blob_to_text_with_progress(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - text_blob = self.get_resource_name('textblob') + text_blob = self.get_resource_name("textblob") text_data = self.get_random_text_data(self.config.max_single_get_size + 1) blob = self.bsc.get_blob_client(self.container_name, text_blob) await blob.upload_blob(text_data) @@ -641,24 +641,19 @@ async def test_get_blob_to_text_with_progress(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = await blob.download_blob( - raw_response_hook=callback, - max_concurrency=2, - encoding='UTF-8') + stream = await blob.download_blob(raw_response_hook=callback, max_concurrency=2, encoding="UTF-8") content = await stream.readall() # Assert assert text_data == content self.assert_download_progress( - len(text_data.encode('utf-8')), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text_data.encode("utf-8")), self.config.max_chunk_get_size, self.config.max_single_get_size, 
progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -676,24 +671,19 @@ async def test_get_blob_to_text_non_parallel(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = await blob.download_blob( - raw_response_hook=callback, - max_concurrency=1, - encoding='UTF-8') + stream = await blob.download_blob(raw_response_hook=callback, max_concurrency=1, encoding="UTF-8") content = await stream.readall() # Assert assert text_data == content self.assert_download_progress( - len(text_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -711,21 +701,19 @@ async def test_get_blob_to_text_small(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act - stream = await blob.download_blob(raw_response_hook=callback, encoding='UTF-8') + stream = await blob.download_blob(raw_response_hook=callback, encoding="UTF-8") content = await stream.readall() # Assert assert blob_data == content self.assert_download_progress( - len(blob_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -735,13 +723,13 @@ async def test_get_blob_to_text_with_encoding(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - text = u'hello 啊齄丂狛狜 world' + text = "hello 啊齄丂狛狜 world" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(text, encoding='utf-16') + await blob.upload_blob(text, encoding="utf-16") # Act - stream = await blob.download_blob(encoding='utf-16') + stream = await blob.download_blob(encoding="utf-16") content = await stream.readall() # Assert @@ -755,29 +743,27 @@ async def test_get_blob_to_text_with_encoding_and_progress(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) - text = u'hello 啊齄丂狛狜 world' + text = "hello 啊齄丂狛狜 world" blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(text, encoding='utf-16') + await blob.upload_blob(text, encoding="utf-16") # Act progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) - stream = await blob.download_blob(raw_response_hook=callback, encoding='utf-16') + stream = await blob.download_blob(raw_response_hook=callback, encoding="utf-16") content = await stream.readall() # Assert assert text == content self.assert_download_progress( - len(text.encode('utf-8')), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(text.encode("utf-8")), 
self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -837,8 +823,8 @@ async def test_get_blob_to_stream_exact_get_size(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -850,7 +836,9 @@ def callback(response): temp_file.seek(0) actual = temp_file.read() assert byte_data == actual - self.assert_download_progress(len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress) + self.assert_download_progress( + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -868,8 +856,8 @@ async def test_get_blob_exact_get_size(self, **kwargs): progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -878,10 +866,8 @@ def callback(response): # Assert assert byte_data == content self.assert_download_progress( - len(byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @BlobPreparer() @recorded_by_proxy_async @@ -892,17 +878,15 @@ async def test_get_blob_exact_chunk_size(self, **kwargs): # Arrange await self._setup(storage_account_name, storage_account_key) blob_name = self._get_blob_reference() - byte_data = self.get_random_bytes( - self.config.max_single_get_size + - self.config.max_chunk_get_size) + byte_data = self.get_random_bytes(self.config.max_single_get_size + self.config.max_chunk_get_size) blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(byte_data) progress = [] def callback(response): - current = response.context['download_stream_current'] - total = response.context['data_stream_total'] + current = response.context["download_stream_current"] + total = response.context["data_stream_total"] progress.append((current, total)) # Act @@ -911,10 +895,8 @@ def callback(response): # Assert assert byte_data == content self.assert_download_progress( - len(byte_data), - self.config.max_chunk_get_size, - self.config.max_single_get_size, - progress) + len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress + ) @pytest.mark.live_test_only @BlobPreparer() @@ -969,7 +951,7 @@ async def test_get_blob_range_to_stream_with_overall_md5(self, **kwargs): await self._setup(storage_account_name, storage_account_key) blob = self.bsc.get_blob_client(self.container_name, self.byte_blob) props = await blob.get_blob_properties() - props.content_settings.content_md5 = b'MDAwMDAwMDA=' + props.content_settings.content_md5 = b"MDAwMDAwMDA=" await blob.set_http_headers(props.content_settings) # Act @@ -979,7 +961,7 @@ async def test_get_blob_range_to_stream_with_overall_md5(self, **kwargs): # Assert assert read_bytes == 1024 - assert b'MDAwMDAwMDA=' == downloader.properties.content_settings.content_md5 + assert b"MDAwMDAwMDA=" == downloader.properties.content_settings.content_md5 assert downloader.size == 1024 @BlobPreparer() @@ -994,14 +976,14 @@ async def 
test_get_blob_range_with_overall_md5(self, **kwargs): # Arrange props = await blob.get_blob_properties() - props.content_settings.content_md5 = b'MDAwMDAwMDA=' + props.content_settings.content_md5 = b"MDAwMDAwMDA=" await blob.set_http_headers(props.content_settings) # Act content = await blob.download_blob(offset=0, length=1024, validate_content=True) # Assert - assert b'MDAwMDAwMDA=' == content.properties.content_settings.content_md5 + assert b"MDAwMDAwMDA=" == content.properties.content_settings.content_md5 @BlobPreparer() @recorded_by_proxy_async @@ -1033,15 +1015,16 @@ async def test_get_blob_progress_single_get(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'a' * 512 + data = b"a" * 512 blob_name = self._get_blob_reference() blob = BlobClient( - self.account_url(storage_account_name, 'blob'), + self.account_url(storage_account_name, "blob"), self.container_name, blob_name, credential=storage_account_key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) await blob.upload_blob(data, overwrite=True) @@ -1061,15 +1044,16 @@ async def test_get_blob_progress_chunked(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = BlobClient( - self.account_url(storage_account_name, 'blob'), + self.account_url(storage_account_name, "blob"), self.container_name, blob_name, credential=storage_account_key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) await blob.upload_blob(data, overwrite=True) @@ -1090,15 +1074,16 @@ async def test_get_blob_progress_chunked_parallel(self, **kwargs): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = BlobClient( - self.account_url(storage_account_name, 'blob'), + self.account_url(storage_account_name, "blob"), self.container_name, blob_name, credential=storage_account_key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) await blob.upload_blob(data, overwrite=True) @@ -1119,15 +1104,16 @@ async def test_get_blob_progress_range(self, **kwargs): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 5120 blob_name = self._get_blob_reference() blob = BlobClient( - self.account_url(storage_account_name, 'blob'), + self.account_url(storage_account_name, "blob"), self.container_name, blob_name, credential=storage_account_key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) await blob.upload_blob(data, overwrite=True) @@ -1136,10 +1122,8 @@ async def test_get_blob_progress_range(self, **kwargs): # Act stream = await blob.download_blob( - offset=512, - length=length, - max_concurrency=3, - progress_hook=progress.assert_progress) + offset=512, length=length, max_concurrency=3, progress_hook=progress.assert_progress + ) await stream.readall() # Assert @@ -1153,15 +1137,16 @@ async def test_get_blob_progress_readinto(self, **kwargs): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account_name, storage_account_key) - data = b'a' * 5120 + data = b"a" * 
5120 blob_name = self._get_blob_reference() blob = BlobClient( - self.account_url(storage_account_name, 'blob'), + self.account_url(storage_account_name, "blob"), self.container_name, blob_name, credential=storage_account_key, max_single_get_size=1024, - max_chunk_get_size=1024) + max_chunk_get_size=1024, + ) await blob.upload_blob(data, overwrite=True) @@ -1183,7 +1168,7 @@ async def test_get_blob_read_empty(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") await self._setup(storage_account_name, storage_account_key) - data = b'' + data = b"" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1205,7 +1190,7 @@ async def test_get_blob_read_all(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1225,7 +1210,7 @@ async def test_get_blob_read_single(self, **kwargs): self.bsc._config.max_single_get_size = 10 * 1024 self.bsc._config.max_chunk_get_size = 10 * 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() @@ -1254,7 +1239,7 @@ async def test_get_blob_read_small_chunks(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() @@ -1282,7 +1267,7 @@ async def test_get_blob_read_large_chunks(self, **kwargs): await self._setup(storage_account_name, storage_account_key, upload_blob=False) self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() @@ -1311,7 +1296,7 @@ async def test_get_blob_read_chunk_equal_download_chunk(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() @@ -1341,7 +1326,7 @@ async def test_get_blob_read_random_chunks(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() @@ -1369,7 +1354,7 @@ async def test_get_blob_read_parallel(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, 
self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob(max_concurrency=3) @@ -1400,13 +1385,13 @@ async def test_get_blob_into_upload(self, **kwargs): self.bsc._config.max_single_put_size = 1024 self.bsc._config.max_block_size = 1024 - data = b'12345' * 205 * 15 # 15375 bytes + data = b"12345" * 205 * 15 # 15375 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) stream = await blob.download_blob() # Act - blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference() + '-copy') + blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference() + "-copy") await blob2.upload_blob(stream, overwrite=True) result = await (await blob2.download_blob()).readall() @@ -1423,7 +1408,7 @@ async def test_get_blob_read_past(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'Hello World' + data = b"Hello World" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1435,7 +1420,7 @@ async def test_get_blob_read_past(self, **kwargs): assert result == data for _ in range(3): result = await stream.read(100) - assert result == b'' + assert result == b"" @BlobPreparer() @recorded_by_proxy_async @@ -1447,7 +1432,7 @@ async def test_get_blob_read_ranged(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1459,8 +1444,8 @@ async def test_get_blob_read_ranged(self, **kwargs): data1 = await stream.read(read_size) data2 = await stream.read(read_size) - assert data1 == data[offset:offset + read_size] - assert data2 == data[offset + read_size:offset + length] + assert data1 == data[offset : offset + read_size] + assert data2 == data[offset + read_size : offset + length] offset, length = 501, 3000 stream = await blob.download_blob(offset=offset, length=length) @@ -1469,8 +1454,8 @@ async def test_get_blob_read_ranged(self, **kwargs): data1 = await stream.read(read_size) data2 = await stream.read(read_size) - assert data1 == data[offset:offset + read_size] - assert data2 == data[offset + read_size:offset + length] + assert data1 == data[offset : offset + read_size] + assert data2 == data[offset + read_size : offset + length] @BlobPreparer() @recorded_by_proxy_async @@ -1482,7 +1467,7 @@ async def test_get_blob_read_with_other_read_operations_single(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'Hello World' + data = b"Hello World" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1512,7 +1497,7 @@ async def test_get_blob_read_with_other_read_operations_single(self, **kwargs): second = second_stream.getvalue() assert first == data - assert second == b'' + assert second == b"" assert read_size == 0 @BlobPreparer() @@ -1525,7 +1510,7 @@ async def test_get_blob_read_with_other_read_operations_chunks(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = 
self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1564,7 +1549,7 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 10 # 10250 bytes + data = b"12345" * 205 * 10 # 10250 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) offset, length = 1024, 2048 @@ -1574,16 +1559,16 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): first = await stream.read(100) # Read in first chunk second = await stream.readall() - assert first == data[offset:offset + 100] - assert second == data[offset + 100:offset + length] + assert first == data[offset : offset + 100] + assert second == data[offset + 100 : offset + length] offset, length = 501, 5000 stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(3000) # Read past first chunk second = await stream.readall() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] stream = await blob.download_blob(offset=offset, length=length) first = await stream.read(3000) # Read past first chunk @@ -1591,8 +1576,8 @@ async def test_get_blob_read_with_other_read_operations_ranged(self, **kwargs): read_size = await stream.readinto(second_stream) second = second_stream.getvalue() - assert first == data[offset:offset + 3000] - assert second == data[offset + 3000:offset + length] + assert first == data[offset : offset + 3000] + assert second == data[offset + 3000 : offset + length] assert read_size == len(second) @BlobPreparer() @@ -1605,7 +1590,7 @@ async def test_get_blob_read_progress(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = b'12345' * 205 * 5 # 5125 bytes + data = b"12345" * 205 * 5 # 5125 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1638,7 +1623,7 @@ async def test_get_blob_read_progress_chars(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 260 # 3120 bytes + data = "你好世界" * 260 # 3120 bytes blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, overwrite=True) @@ -1654,7 +1639,7 @@ async def assert_progress(self, current, total): self.num_read += 1 progress = CustomProgressTracker() - stream = await blob.download_blob(encoding='utf-8', progress_hook=progress.assert_progress) + stream = await blob.download_blob(encoding="utf-8", progress_hook=progress.assert_progress) # Act / Assert for _ in range(4): @@ -1671,18 +1656,18 @@ async def test_get_blob_read_chars_single(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 5 + data = "你好世界" * 5 blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await blob.upload_blob(data, encoding='utf-8', overwrite=True) + await blob.upload_blob(data, encoding="utf-8", overwrite=True) - stream = await blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") assert await stream.read() == data - stream = await 
blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") assert await stream.read(chars=100000) == data - result = '' - stream = await blob.download_blob(encoding='utf-8') + result = "" + stream = await blob.download_blob(encoding="utf-8") for _ in range(4): chunk = await stream.read(chars=5) result += chunk @@ -1701,18 +1686,18 @@ async def test_get_blob_read_chars_chunks(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 256 # 3 KiB + data = "你好世界" * 256 # 3 KiB blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await blob.upload_blob(data, encoding='utf-8', overwrite=True) + await blob.upload_blob(data, encoding="utf-8", overwrite=True) - stream = await blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") assert await stream.read() == data - stream = await blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") assert await stream.read(chars=100000) == data - result = '' - stream = await blob.download_blob(encoding='utf-8') + result = "" + stream = await blob.download_blob(encoding="utf-8") for _ in range(4): chunk = await stream.read(chars=100) result += chunk @@ -1731,21 +1716,21 @@ async def test_get_blob_read_chars_ranged(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 256 # 3 KiB + data = "你好世界" * 256 # 3 KiB blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await blob.upload_blob(data, encoding='utf-8', overwrite=True) + await blob.upload_blob(data, encoding="utf-8", overwrite=True) # Offset and length need to be multiple of 3 to meet unicode boundaries offset, length = 9, 1500 - expected = data[offset // 3: offset // 3 + length // 3] - stream = await blob.download_blob(offset=offset, length=length, encoding='utf-8') + expected = data[offset // 3 : offset // 3 + length // 3] + stream = await blob.download_blob(offset=offset, length=length, encoding="utf-8") assert await stream.read() == expected - stream = await blob.download_blob(offset=offset, length=length, encoding='utf-8') + stream = await blob.download_blob(offset=offset, length=length, encoding="utf-8") assert await stream.read(chars=100000) == expected - result = '' - stream = await blob.download_blob(offset=offset, length=length, encoding='utf-8') + result = "" + stream = await blob.download_blob(offset=offset, length=length, encoding="utf-8") for _ in range(4): chunk = await stream.read(chars=100) result += chunk @@ -1764,28 +1749,28 @@ async def test_get_blob_read_chars_mixed(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 2 + data = "你好世界" * 2 blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) - await blob.upload_blob(data, encoding='utf-8', overwrite=True) + await blob.upload_blob(data, encoding="utf-8", overwrite=True) - stream = await blob.download_blob(encoding='utf-8') + stream = await blob.download_blob(encoding="utf-8") # Read some data as chars, this should prevent any reading as bytes - assert await stream.read(chars=4) == '你好世界' + assert await stream.read(chars=4) == "你好世界" # readinto, chunks, and read(size=x) should now be blocked with pytest.raises(ValueError) as e: await stream.readinto(BytesIO()) - assert 'Stream has been partially read in text mode.' 
in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) with pytest.raises(ValueError) as e: stream.chunks() - assert 'Stream has been partially read in text mode.' in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) with pytest.raises(ValueError) as e: await stream.read(size=12) - assert 'Stream has been partially read in text mode.' in str(e.value) + assert "Stream has been partially read in text mode." in str(e.value) # read() should still work to get remaining chars - assert await stream.read() == '你好世界' + assert await stream.read() == "你好世界" @BlobPreparer() @recorded_by_proxy_async @@ -1797,15 +1782,15 @@ async def test_get_blob_read_chars_utf32(self, **kwargs): self.bsc._config.max_single_get_size = 1024 self.bsc._config.max_chunk_get_size = 1024 - data = '你好世界' * 256 - encoding = 'utf-32' + data = "你好世界" * 256 + encoding = "utf-32" blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) await blob.upload_blob(data, encoding=encoding, overwrite=True) stream = await blob.download_blob(encoding=encoding) assert await stream.read() == data - result = '' + result = "" stream = await blob.download_blob(encoding=encoding) for _ in range(4): chunk = await stream.read(chars=100) @@ -1815,4 +1800,5 @@ async def test_get_blob_read_chars_utf32(self, **kwargs): result += await stream.readall() assert result == data + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_large_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_large_block_blob.py index 49f0c31960cb..c04c3fdb5db3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_large_block_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_large_block_blob.py @@ -17,12 +17,12 @@ from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'largeblob' +TEST_BLOB_PREFIX = "largeblob" LARGE_BLOB_SIZE = 12 * 1024 * 1024 LARGE_BLOCK_SIZE = 6 * 1024 * 1024 # ------------------------------------------------------------------------------ -if platform.python_implementation() == 'PyPy': +if platform.python_implementation() == "PyPy": pytest.skip("Skip tests for Pypy", allow_module_level=True) @@ -36,9 +36,10 @@ def _setup(self, storage_account_name, key): credential=key, max_single_put_size=32 * 1024, max_block_size=2 * 1024 * 1024, - min_large_block_upload_threshold=1 * 1024 * 1024) + min_large_block_upload_threshold=1 * 1024 * 1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: @@ -53,13 +54,14 @@ def _get_blob_reference(self): def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(b'') + blob.upload_blob(b"") return blob def assertBlobEqual(self, container_name, blob_name, expected_data): blob = self.bsc.get_blob_client(container_name, blob_name) actual_data = blob.download_blob() assert b"".join(list(actual_data.chunks())) == expected_data + # -------------------------------------------------------------------------- @pytest.mark.live_test_only @@ -73,12 +75,11 @@ def test_put_block_bytes_large(self, **kwargs): # Act for i in range(5): - resp = blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), urandom(LARGE_BLOCK_SIZE)) + resp = blob.stage_block("block 
{0}".format(i).encode("utf-8"), urandom(LARGE_BLOCK_SIZE)) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -92,13 +93,12 @@ def test_put_block_bytes_large_with_md5(self, **kwargs): # Act for i in range(5): resp = blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - urandom(LARGE_BLOCK_SIZE), - validate_content=True) + "block {0}".format(i).encode("utf-8"), urandom(LARGE_BLOCK_SIZE), validate_content=True + ) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -112,14 +112,11 @@ def test_put_block_stream_large(self, **kwargs): # Act for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) - resp = resp = blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - stream, - length=LARGE_BLOCK_SIZE) + resp = blob.stage_block("block {0}".format(i).encode("utf-8"), stream, length=LARGE_BLOCK_SIZE) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -134,14 +131,12 @@ def test_put_block_stream_large_with_md5(self, **kwargs): for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) resp = resp = blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - stream, - length=LARGE_BLOCK_SIZE, - validate_content=True) + "block {0}".format(i).encode("utf-8"), stream, length=LARGE_BLOCK_SIZE, validate_content=True + ) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -219,9 +214,10 @@ def test_create_large_blob_from_path_with_progress(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -246,9 +242,7 @@ def test_create_large_blob_from_path_with_properties(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -293,9 +287,10 @@ def test_creat_lrgblob_frm_stream_w_progress_chnkd_upload(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -341,9 +336,7 @@ def test_creat_lrgblob_frm_strm_chnkd_uplod_w_count_n_props(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( -
content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") blob_size = len(data) - 301 with tempfile.TemporaryFile() as temp_file: temp_file.write(data) @@ -368,9 +361,7 @@ def test_creat_lrg_blob_frm_stream_chnked_upload_w_props(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -382,4 +373,5 @@ def test_creat_lrg_blob_frm_stream_chnked_upload_w_props(self, **kwargs): assert properties.content_settings.content_type == content_settings.content_type assert properties.content_settings.content_language == content_settings.content_language -# ------------------------------------------------------------------------------ \ No newline at end of file + +# ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py index 93e8452d9612..7369ea39ee4d 100644 --- a/sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py @@ -19,7 +19,7 @@ from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'largeblob' +TEST_BLOB_PREFIX = "largeblob" LARGE_BLOB_SIZE = 12 * 1024 * 1024 LARGE_BLOCK_SIZE = 6 * 1024 * 1024 # ------------------------------------------------------------------------------ @@ -35,9 +35,10 @@ async def _setup(self, storage_account_name, key): credential=key, max_single_put_size=32 * 1024, max_block_size=2 * 1024 * 1024, - min_large_block_upload_threshold=1 * 1024 * 1024) + min_large_block_upload_threshold=1 * 1024 * 1024, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") if self.is_live: try: await self.bsc.create_container(self.container_name) @@ -51,7 +52,7 @@ def _get_blob_reference(self): async def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(b'') + await blob.upload_blob(b"") return blob async def assertBlobEqual(self, container_name, blob_name, expected_data): @@ -61,6 +62,7 @@ async def assertBlobEqual(self, container_name, blob_name, expected_data): async for data in actual_data.chunks(): actual_bytes += data assert actual_bytes == expected_data + # -------------------------------------------------------------------------- @pytest.mark.live_test_only @@ -76,8 +78,7 @@ async def test_put_block_bytes_large(self, **kwargs): # Act futures = [] for i in range(5): - futures.append(blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), urandom(LARGE_BLOCK_SIZE))) + futures.append(blob.stage_block("block {0}".format(i).encode("utf-8"), urandom(LARGE_BLOCK_SIZE))) await asyncio.gather(*futures) @@ -94,13 +95,12 @@ async def test_put_block_bytes_large_with_md5(self, **kwargs): # Act for i in range(5): resp = await blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - urandom(LARGE_BLOCK_SIZE), - validate_content=True) + "block {0}".format(i).encode("utf-8"), 
urandom(LARGE_BLOCK_SIZE), validate_content=True + ) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -115,14 +115,11 @@ async def test_put_block_stream_large(self, **kwargs): # Act for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) - resp = await blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - stream, - length=LARGE_BLOCK_SIZE) + resp = await blob.stage_block("block {0}".format(i).encode("utf-8"), stream, length=LARGE_BLOCK_SIZE) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp @pytest.mark.live_test_only @BlobPreparer() @@ -138,14 +135,12 @@ async def test_put_block_stream_large_with_md5(self, **kwargs): for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) resp = resp = await blob.stage_block( - 'block {0}'.format(i).encode('utf-8'), - stream, - length=LARGE_BLOCK_SIZE, - validate_content=True) + "block {0}".format(i).encode("utf-8"), stream, length=LARGE_BLOCK_SIZE, validate_content=True + ) assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp # Assert @@ -194,7 +189,6 @@ async def test_create_large_blob_from_path_with_md5(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob_name, data) - @pytest.mark.live_test_only @BlobPreparer() async def test_create_large_blob_from_path_non_parallel(self, **kwargs): @@ -216,7 +210,6 @@ async def test_create_large_blob_from_path_non_parallel(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob_name, data) - @pytest.mark.live_test_only @BlobPreparer() async def test_create_large_blob_from_path_with_progress(self, **kwargs): @@ -231,9 +224,10 @@ async def test_create_large_blob_from_path_with_progress(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -246,7 +240,6 @@ def callback(response): await self.assertBlobEqual(self.container_name, blob_name, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) - @pytest.mark.live_test_only @BlobPreparer() async def test_create_large_blob_from_path_with_properties(self, **kwargs): @@ -260,9 +253,7 @@ async def test_create_large_blob_from_path_with_properties(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -309,9 +300,10 @@ async def test_creat_lrgblob_frm_strm_w_prgrss_chnkduplod(self, **kwargs): # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = 
response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -359,9 +351,7 @@ async def test_creat_lrg_frm_stream_chnk_upload_w_cntnprops(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") blob_size = len(data) - 301 with tempfile.TemporaryFile() as temp_file: temp_file.write(data) @@ -387,9 +377,7 @@ async def test_create_large_from_stream_chunk_upld_with_props(self, **kwargs): data = bytearray(urandom(LARGE_BLOB_SIZE)) # Act - content_settings = ContentSettings( - content_type='image/png', - content_language='spanish') + content_settings = ContentSettings(content_type="image/png", content_language="spanish") with tempfile.TemporaryFile() as temp_file: temp_file.write(data) temp_file.seek(0) @@ -401,4 +389,5 @@ async def test_create_large_from_stream_chunk_upld_with_props(self, **kwargs): assert properties.content_settings.content_type == content_settings.content_type assert properties.content_settings.content_language == content_settings.content_language + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py index b632d76e9fd8..92d43ffac5cf 100644 --- a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py @@ -19,23 +19,24 @@ from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'largestblob' +TEST_BLOB_PREFIX = "largestblob" LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024 LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024 LARGE_BLOCK_SIZE = 100 * 1024 * 1024 # ------------------------------------------------------------------------------ -if platform.python_implementation() == 'PyPy': +if platform.python_implementation() == "PyPy": pytest.skip("Skip tests for Pypy", allow_module_level=True) class TestStorageLargestBlockBlob(StorageRecordedTestCase): def _setup( - self, storage_account_name, + self, + storage_account_name, key, additional_policies=None, min_large_block_upload_threshold=1 * 1024 * 1024, - max_single_put_size=32 * 1024 + max_single_put_size=32 * 1024, ): self.bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -43,9 +44,10 @@ def _setup( max_single_put_size=max_single_put_size, max_block_size=LARGEST_BLOCK_SIZE, min_large_block_upload_threshold=min_large_block_upload_threshold, - _additional_pipeline_policies=additional_policies) + _additional_pipeline_policies=additional_policies, + ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") self.container_name = self.container_name + str(uuid.uuid4()) if self.is_live: @@ -58,7 +60,7 @@ def _get_blob_reference(self): def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - blob.upload_blob(b'') + blob.upload_blob(b"") return blob # --Test cases for block blobs -------------------------------------------- @@ -74,19 +76,16 @@ def test_put_block_bytes_largest(self, **kwargs): # Act data = urandom(LARGEST_BLOCK_SIZE) - blockId = str(uuid.uuid4()).encode('utf-8') - resp = 
blob.stage_block( - blockId, - data, - length=LARGEST_BLOCK_SIZE) + blockId = str(uuid.uuid4()).encode("utf-8") + resp = blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE) blob.commit_block_list([BlobBlock(blockId)]) block_list = blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -106,19 +105,16 @@ def test_put_block_bytes_largest_without_network(self, **kwargs): # Act data = urandom(LARGEST_BLOCK_SIZE) - blockId = str(uuid.uuid4()).encode('utf-8') - resp = blob.stage_block( - blockId, - data, - length=LARGEST_BLOCK_SIZE) + blockId = str(uuid.uuid4()).encode("utf-8") + resp = blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE) blob.commit_block_list([BlobBlock(blockId)]) block_list = blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -140,19 +136,15 @@ def test_put_block_stream_largest(self, **kwargs): stream = LargeStream(LARGEST_BLOCK_SIZE) blockId = str(uuid.uuid4()) requestId = str(uuid.uuid4()) - resp = blob.stage_block( - blockId, - stream, - length=LARGEST_BLOCK_SIZE, - client_request_id=requestId) + resp = blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId) blob.commit_block_list([BlobBlock(blockId)]) block_list = blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -174,19 +166,15 @@ def test_put_block_stream_largest_without_network(self, **kwargs): stream = LargeStream(LARGEST_BLOCK_SIZE) blockId = str(uuid.uuid4()) requestId = str(uuid.uuid4()) - resp = blob.stage_block( - blockId, - stream, - length=LARGEST_BLOCK_SIZE, - client_request_id=requestId) + resp = blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId) blob.commit_block_list([BlobBlock(blockId)]) block_list = blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -215,7 +203,6 @@ def test_create_largest_blob_from_path(self, **kwargs): temp_file.seek(0) blob.upload_blob(temp_file, max_concurrency=2) - def test_substream_for_single_thread_upload_large_block(self): with tempfile.TemporaryFile() as temp_file: largeStream = LargeStream(LARGE_BLOCK_SIZE, 4 * 1024 * 1024) @@ -280,7 +267,7 @@ def test_create_largest_blob_from_stream_without_network(self, **kwargs): number_of_blocks = 50000 - stream = LargeStream(LARGEST_BLOCK_SIZE*number_of_blocks) + stream = LargeStream(LARGEST_BLOCK_SIZE * number_of_blocks) # Act blob.upload_blob(stream, max_concurrency=1) @@ -297,8 +284,12 @@ def 
test_create_largest_blob_from_stream_single_upload_without_network(self, **k payload_dropping_policy = PayloadDroppingPolicy() credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key) - self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy], - max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE+1) + self._setup( + storage_account_name, + storage_account_key, + [payload_dropping_policy, credential_policy], + max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE + 1, + ) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -313,7 +304,7 @@ def test_create_largest_blob_from_stream_single_upload_without_network(self, **k class LargeStream: - def __init__(self, length, initial_buffer_length=1024*1024): + def __init__(self, length, initial_buffer_length=1024 * 1024): self._base_data = urandom(initial_buffer_length) self._base_data_length = initial_buffer_length self._position = 0 @@ -367,15 +358,17 @@ def _is_put_block_request(request): query = request.http_request.query return query and "comp" in query and query["comp"] == "block" + def _is_put_blob_request(request): query = request.http_request.query return request.http_request.method == "PUT" and not query + def _get_body_length(request): body = request.http_request.body length = 0 if hasattr(body, "read"): - chunk = body.read(10*1024*1024) + chunk = body.read(10 * 1024 * 1024) while chunk: length = length + len(chunk) chunk = body.read(10 * 1024 * 1024) @@ -383,4 +376,5 @@ def _get_body_length(request): length = len(body) return length + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob_async.py index 104ee894a2c6..ad556afd3278 100644 --- a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob_async.py @@ -21,22 +21,23 @@ from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'largestblob' +TEST_BLOB_PREFIX = "largestblob" LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024 LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024 # ------------------------------------------------------------------------------ -if platform.python_implementation() == 'PyPy': +if platform.python_implementation() == "PyPy": pytest.skip("Skip tests for Pypy", allow_module_level=True) class TestStorageLargestBlockBlobAsync(AsyncStorageRecordedTestCase): async def _setup( - self, storage_account_name, + self, + storage_account_name, key, additional_policies=None, min_large_block_upload_threshold=1 * 1024 * 1024, - max_single_put_size=32 * 1024 + max_single_put_size=32 * 1024, ): self.bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), @@ -44,10 +45,10 @@ async def _setup( max_single_put_size=max_single_put_size, max_block_size=LARGEST_BLOCK_SIZE, min_large_block_upload_threshold=min_large_block_upload_threshold, - _additional_pipeline_policies=additional_policies + _additional_pipeline_policies=additional_policies, ) self.config = self.bsc._config - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") self.container_name = self.container_name + str(uuid.uuid4()) if self.is_live: @@ -60,7 +61,7 @@ def 
_get_blob_reference(self): async def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) - await blob.upload_blob(b'') + await blob.upload_blob(b"") return blob # --Test cases for block blobs -------------------------------------------- @@ -76,19 +77,16 @@ async def test_put_block_bytes_largest(self, **kwargs): # Act data = urandom(LARGEST_BLOCK_SIZE) - blockId = str(uuid.uuid4()).encode('utf-8') - resp = await blob.stage_block( - blockId, - data, - length=LARGEST_BLOCK_SIZE) + blockId = str(uuid.uuid4()).encode("utf-8") + resp = await blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE) await blob.commit_block_list([BlobBlock(blockId)]) block_list = await blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -108,19 +106,16 @@ async def test_put_block_bytes_largest_without_network(self, **kwargs): # Act data = urandom(LARGEST_BLOCK_SIZE) - blockId = str(uuid.uuid4()).encode('utf-8') - resp = await blob.stage_block( - blockId, - data, - length=LARGEST_BLOCK_SIZE) + blockId = str(uuid.uuid4()).encode("utf-8") + resp = await blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE) await blob.commit_block_list([BlobBlock(blockId)]) block_list = await blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -142,19 +137,15 @@ async def test_put_block_stream_largest(self, **kwargs): stream = LargeStream(LARGEST_BLOCK_SIZE) blockId = str(uuid.uuid4()) requestId = str(uuid.uuid4()) - resp = await blob.stage_block( - blockId, - stream, - length=LARGEST_BLOCK_SIZE, - client_request_id=requestId) + resp = await blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId) await blob.commit_block_list([BlobBlock(blockId)]) block_list = await blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -176,19 +167,15 @@ async def test_put_block_stream_largest_without_network(self, **kwargs): stream = LargeStream(LARGEST_BLOCK_SIZE) blockId = str(uuid.uuid4()) requestId = str(uuid.uuid4()) - resp = await blob.stage_block( - blockId, - stream, - length=LARGEST_BLOCK_SIZE, - client_request_id=requestId) + resp = await blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId) await blob.commit_block_list([BlobBlock(blockId)]) block_list = await blob.get_block_list() # Assert assert resp is not None - assert 'content_md5' in resp - assert 'content_crc64' in resp - assert 'request_id' in resp + assert "content_md5" in resp + assert "content_crc64" in resp + assert "request_id" in resp assert block_list is not None assert len(block_list) == 2 assert len(block_list[1]) == 0 @@ -258,7 +245,7 @@ async def 
test_create_largest_blob_from_stream_without_network(self, **kwargs): number_of_blocks = 50000 - stream = LargeStream(LARGEST_BLOCK_SIZE*number_of_blocks) + stream = LargeStream(LARGEST_BLOCK_SIZE * number_of_blocks) # Act await blob.upload_blob(stream, max_concurrency=1) @@ -275,8 +262,12 @@ async def test_create_largest_blob_from_stream_single_upload_without_network(sel payload_dropping_policy = PayloadDroppingPolicy() credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key) - await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy], - max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE + 1) + await self._setup( + storage_account_name, + storage_account_key, + [payload_dropping_policy, credential_policy], + max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE + 1, + ) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) @@ -348,15 +339,17 @@ def _is_put_block_request(request): query = request.http_request.query return query and "comp" in query and query["comp"] == "block" + def _is_put_blob_request(request): query = request.http_request.query return request.http_request.method == "PUT" and not query + def _get_body_length(request): body = request.http_request.body length = 0 if hasattr(body, "read"): - chunk = body.read(10*1024*1024) + chunk = body.read(10 * 1024 * 1024) while chunk: length = length + len(chunk) chunk = body.read(10 * 1024 * 1024) @@ -364,4 +357,5 @@ def _get_body_length(request): length = len(body) return length + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_logging.py b/sdk/storage/azure-storage-blob/tests/test_logging.py index 52e1d517b117..9cb69f8709e9 100644 --- a/sdk/storage/azure-storage-blob/tests/test_logging.py +++ b/sdk/storage/azure-storage-blob/tests/test_logging.py @@ -15,7 +15,7 @@ ContainerClient, ContainerSasPermissions, generate_blob_sas, - generate_container_sas + generate_container_sas, ) from azure.storage.blob._shared.shared_access_signature import QueryStringConstants @@ -29,14 +29,15 @@ from urlparse import parse_qs, urlparse from urllib2 import quote -_AUTHORIZATION_HEADER_NAME = 'Authorization' +_AUTHORIZATION_HEADER_NAME = "Authorization" + class TestStorageLogging(StorageRecordedTestCase): def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") # create source blob to be copied from - self.source_blob_name = self.get_resource_name('srcblob') + self.source_blob_name = self.get_resource_name("srcblob") self.source_blob_data = self.get_random_bytes(4 * 1024) source_blob = bsc.get_blob_client(self.container_name, self.source_blob_name) @@ -60,7 +61,7 @@ def _setup(self, bsc): ) sas_source = BlobClient.from_blob_url(source_blob.url, credential=sas_token) self.source_blob_url = sas_source.url - + @BlobPreparer() @recorded_by_proxy def test_logging_request_and_response_body(self, **kwargs): @@ -68,10 +69,12 @@ def test_logging_request_and_response_body(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, logging_enable=True) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, logging_enable=True + ) self._setup(bsc) container = bsc.get_container_client(self.container_name) - 
request_body = 'testloggingbody' + request_body = "testloggingbody" blob_name = self.get_resource_name("testloggingblob") blob_client = container.get_blob_client(blob_name) blob_client.upload_blob(request_body, overwrite=True) @@ -105,7 +108,7 @@ def test_authorization_is_scrubbed_off(self, **kwargs): # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value assert _AUTHORIZATION_HEADER_NAME in log_as_str - assert not 'SharedKey' in log_as_str + assert not "SharedKey" in log_as_str @BlobPreparer() @recorded_by_proxy @@ -152,24 +155,23 @@ def test_copy_source_sas_is_scrubbed_off(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) self._setup(bsc) # Arrange - dest_blob_name = self.get_resource_name('destblob') + dest_blob_name = self.get_resource_name("destblob") dest_blob = bsc.get_blob_client(self.container_name, dest_blob_name) # parse out the signed signature query_parameters = urlparse(self.source_blob_url).query token_components = parse_qs(query_parameters) if QueryStringConstants.SIGNED_SIGNATURE not in token_components: - pytest.fail("Blob URL {} doesn't contain {}, parsed query params: {}".format( - self.source_blob_url, - QueryStringConstants.SIGNED_SIGNATURE, - list(token_components.keys()) - )) + pytest.fail( + "Blob URL {} doesn't contain {}, parsed query params: {}".format( + self.source_blob_url, QueryStringConstants.SIGNED_SIGNATURE, list(token_components.keys()) + ) + ) signed_signature = quote(token_components[QueryStringConstants.SIGNED_SIGNATURE][0]) # Act with LogCaptured(self) as log_captured: - dest_blob.start_copy_from_url( - self.source_blob_url, requires_sync=True, logging_enable=True) + dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True, logging_enable=True) log_as_str = log_captured.getvalue() # Assert @@ -180,4 +182,4 @@ def test_copy_source_sas_is_scrubbed_off(self, **kwargs): # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value assert _AUTHORIZATION_HEADER_NAME in log_as_str - assert not 'SharedKey' in log_as_str + assert not "SharedKey" in log_as_str diff --git a/sdk/storage/azure-storage-blob/tests/test_logging_async.py b/sdk/storage/azure-storage-blob/tests/test_logging_async.py index b7c48bf72c8a..0bb1c736adb3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_logging_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_logging_async.py @@ -23,15 +23,15 @@ from urlparse import parse_qs, urlparse from urllib2 import quote -_AUTHORIZATION_HEADER_NAME = 'Authorization' +_AUTHORIZATION_HEADER_NAME = "Authorization" class TestStorageLoggingAsync(AsyncStorageRecordedTestCase): async def _setup(self, bsc): - self.container_name = self.get_resource_name('utcontainer') + self.container_name = self.get_resource_name("utcontainer") # create source blob to be copied from - self.source_blob_name = self.get_resource_name('srcblob') + self.source_blob_name = self.get_resource_name("srcblob") self.source_blob_data = self.get_random_bytes(4 * 1024) source_blob = bsc.get_blob_client(self.container_name, self.source_blob_name) @@ -62,11 +62,13 @@ async def test_logging_request_and_response_body(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key, 
logging_enable=True) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), storage_account_key, logging_enable=True + ) await self._setup(bsc) # Arrange container = bsc.get_container_client(self.container_name) - request_body = 'testloggingbody' + request_body = "testloggingbody" blob_name = self.get_resource_name("testloggingblob") blob_client = container.get_blob_client(blob_name) await blob_client.upload_blob(request_body, overwrite=True) @@ -81,7 +83,7 @@ async def test_logging_request_and_response_body(self, **kwargs): log_as_str = log_captured.getvalue() assert request_body in log_as_str assert log_as_str.count(request_body) == 1 - + @BlobPreparer() @recorded_by_proxy_async async def test_authorization_is_scrubbed_off(self, **kwargs): @@ -100,7 +102,7 @@ async def test_authorization_is_scrubbed_off(self, **kwargs): # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value assert _AUTHORIZATION_HEADER_NAME in log_as_str - assert not 'SharedKey' in log_as_str + assert not "SharedKey" in log_as_str @BlobPreparer() @recorded_by_proxy_async @@ -148,24 +150,23 @@ async def test_copy_source_sas_is_scrubbed_off(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key) await self._setup(bsc) # Arrange - dest_blob_name = self.get_resource_name('destblob') + dest_blob_name = self.get_resource_name("destblob") dest_blob = bsc.get_blob_client(self.container_name, dest_blob_name) # parse out the signed signature query_parameters = urlparse(self.source_blob_url).query token_components = parse_qs(query_parameters) if QueryStringConstants.SIGNED_SIGNATURE not in token_components: - pytest.fail("Blob URL {} doesn't contain {}, parsed query params: {}".format( - self.source_blob_url, - QueryStringConstants.SIGNED_SIGNATURE, - list(token_components.keys()) - )) + pytest.fail( + "Blob URL {} doesn't contain {}, parsed query params: {}".format( + self.source_blob_url, QueryStringConstants.SIGNED_SIGNATURE, list(token_components.keys()) + ) + ) signed_signature = quote(token_components[QueryStringConstants.SIGNED_SIGNATURE][0]) # Act with LogCaptured(self) as log_captured: - await dest_blob.start_copy_from_url( - self.source_blob_url, requires_sync=True, logging_enable=True) + await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True, logging_enable=True) log_as_str = log_captured.getvalue() # Assert @@ -176,4 +177,4 @@ async def test_copy_source_sas_is_scrubbed_off(self, **kwargs): # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value assert _AUTHORIZATION_HEADER_NAME in log_as_str - assert not 'SharedKey' in log_as_str + assert not "SharedKey" in log_as_str diff --git a/sdk/storage/azure-storage-blob/tests/test_ors.py b/sdk/storage/azure-storage-blob/tests/test_ors.py index 0a7075fdb0ba..920b8d2428cc 100644 --- a/sdk/storage/azure-storage-blob/tests/test_ors.py +++ b/sdk/storage/azure-storage-blob/tests/test_ors.py @@ -25,12 +25,12 @@ class TestStorageObjectReplication(StorageRecordedTestCase): # mock a response to test the deserializer def test_deserialize_ors_policies(self): headers = { - 'x-ms-or-111_111': 'Completed', - 'x-ms-or-111_222': 'Failed', - 'x-ms-or-222_111': 'Completed', - 'x-ms-or-222_222': 'Failed', - 'x-ms-or-policy-id': '333', # to be ignored - 'x-ms-not-related': 'garbage', # to be ignored + "x-ms-or-111_111": "Completed", +
"x-ms-or-111_222": "Failed", + "x-ms-or-222_111": "Completed", + "x-ms-or-222_222": "Failed", + "x-ms-or-policy-id": "333", # to be ignored + "x-ms-not-related": "garbage", # to be ignored } result = deserialize_ors_policies(headers) @@ -39,10 +39,10 @@ def test_deserialize_ors_policies(self): assert len(result[1].rules) == 2 # 2 rules for policy 222 # check individual result - assert result[0].rules[0].status == 'Completed' if result[0].rules[0].rule_id == '111' else 'Failed' - assert result[0].rules[1].status == 'Failed' if result[0].rules[1].rule_id == '222' else 'Completed' - assert result[1].rules[0].status == 'Completed' if result[1].rules[0].rule_id == '111' else 'Failed' - assert result[1].rules[1].status == 'Failed' if result[1].rules[1].rule_id == '222' else 'Completed' + assert result[0].rules[0].status == ("Completed" if result[0].rules[0].rule_id == "111" else "Failed") + assert result[0].rules[1].status == ("Failed" if result[0].rules[1].rule_id == "222" else "Completed") + assert result[1].rules[0].status == ("Completed" if result[1].rules[0].rule_id == "111" else "Failed") + assert result[1].rules[1].status == ("Failed" if result[1].rules[1].rule_id == "222" else "Completed") @pytest.mark.playback_test_only @BlobPreparer() @@ -52,9 +52,7 @@ def test_ors_source(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) blob = bsc.get_blob_client(container=self.SRC_CONTAINER, blob=self.BLOB_NAME) # Act @@ -64,13 +62,13 @@ def test_ors_source(self, **kwargs): assert isinstance(props, BlobProperties) assert props.object_replication_source_properties is not None for replication_policy in props.object_replication_source_properties: - assert replication_policy.policy_id != '' + assert replication_policy.policy_id != "" assert replication_policy.rules is not None for rule in replication_policy.rules: - assert rule.rule_id != '' + assert rule.rule_id != "" assert rule.status is not None - assert rule.status != '' + assert rule.status != "" # Check that the download function gives back the same result stream = blob.download_blob() @@ -84,9 +82,7 @@ def test_ors_destination(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) blob = bsc.get_blob_client(container=self.DST_CONTAINER, blob=self.BLOB_NAME) # Act @@ -100,4 +96,5 @@ def test_ors_destination(self, **kwargs): stream = blob.download_blob() assert stream.properties.object_replication_destination_policy == props.object_replication_destination_policy + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_ors_async.py b/sdk/storage/azure-storage-blob/tests/test_ors_async.py index a1474398534d..aba491a5ab06 100644 --- a/sdk/storage/azure-storage-blob/tests/test_ors_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_ors_async.py @@ -46,13 +46,13 @@ async def test_ors_source(self, **kwargs): assert isinstance(props, BlobProperties) assert props.object_replication_source_properties is not None for replication_policy in props.object_replication_source_properties: - assert
replication_policy.policy_id != '' + assert replication_policy.policy_id != "" assert replication_policy.rules is not None for rule in replication_policy.rules: - assert rule.rule_id != '' + assert rule.rule_id != "" assert rule.status is not None - assert rule.status != '' + assert rule.status != "" # Check that the download function gives back the same result stream = await blob.download_blob() @@ -83,4 +83,5 @@ async def test_ors_destination(self, **kwargs): stream = await blob.download_blob() assert stream.properties.object_replication_destination_policy == props.object_replication_destination_policy + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_page_blob.py b/sdk/storage/azure-storage-blob/tests/test_page_blob.py index 2c76e73854e4..ec887952dbd3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_page_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_page_blob.py @@ -22,7 +22,8 @@ ImmutabilityPolicy, PremiumPageBlobTier, SequenceNumberAction, - generate_blob_sas) + generate_blob_sas, +) from azure.storage.blob._shared.policies import StorageContentValidation from devtools_testutils import recorded_by_proxy @@ -31,7 +32,7 @@ from test_helpers import NonSeekableStream, ProgressTracker # ------------------------------------------------------------------------------ -TEST_BLOB_PREFIX = 'blob' +TEST_BLOB_PREFIX = "blob" LARGE_BLOB_SIZE = 10 * 1024 + 512 EIGHT_TB = 8 * 1024 * 1024 * 1024 * 1024 SOURCE_BLOB_SIZE = 8 * 1024 @@ -43,8 +44,8 @@ class TestStoragePageBlob(StorageRecordedTestCase): def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utcontainer') - self.source_container_name = self.get_resource_name('utcontainersource') + self.container_name = self.get_resource_name("utcontainer") + self.source_container_name = self.get_resource_name("utcontainersource") if self.is_live: try: bsc.create_container(self.container_name) @@ -56,9 +57,7 @@ def _setup(self, bsc): pass def _get_blob_reference(self, bsc) -> BlobClient: - return bsc.get_blob_client( - self.container_name, - self.get_resource_name(TEST_BLOB_PREFIX)) + return bsc.get_blob_client(self.container_name, self.get_resource_name(TEST_BLOB_PREFIX)) def _create_blob(self, bsc, length=512, sequence_number=None, tags=None) -> BlobClient: blob = self._get_blob_reference(bsc) @@ -66,15 +65,15 @@ def _create_blob(self, bsc, length=512, sequence_number=None, tags=None) -> Blob return blob def _create_source_blob_with_special_chars(self, bs, data, offset, length) -> BlobClient: - blob_client = bs.get_blob_client(self.source_container_name, - 'भारत¥test/testsubÐirÍ/' + self.get_resource_name('srcÆblob')) + blob_client = bs.get_blob_client( + self.source_container_name, "भारत¥test/testsubÐirÍ/" + self.get_resource_name("srcÆblob") + ) blob_client.create_page_blob(size=length) blob_client.upload_page(data, offset=offset, length=length) return blob_client def _create_source_blob(self, bs, data, offset, length) -> BlobClient: - blob_client = bs.get_blob_client(self.source_container_name, - self.get_resource_name(TEST_BLOB_PREFIX)) + blob_client = bs.get_blob_client(self.source_container_name, self.get_resource_name(TEST_BLOB_PREFIX)) blob_client.create_page_blob(size=length) blob_client.upload_page(data, offset=offset, length=length) return blob_client @@ -82,19 +81,19 @@ def _create_source_blob(self, bs, data, offset, length) -> BlobClient: def _wait_for_async_copy(self, blob): count = 0 props = 
blob.get_blob_properties() - while props.copy.status == 'pending': + while props.copy.status == "pending": count = count + 1 if count > 10: - self.fail('Timed out waiting for async copy to complete.') + self.fail("Timed out waiting for async copy to complete.") self.sleep(6) props = blob.get_blob_properties() return props - def _create_sparse_page_blob(self, bsc, size=1024*1024, data='') -> BlobClient: + def _create_sparse_page_blob(self, bsc, size=1024 * 1024, data="") -> BlobClient: blob_client = self._get_blob_reference(bsc) blob_client.create_page_blob(size=size) - range_start = 8*1024 + 512 + range_start = 8 * 1024 + 512 # the page blob will be super sparse like this:' some data ' blob_client.upload_page(data, offset=range_start, length=len(data)) @@ -118,7 +117,9 @@ def test_create_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) @@ -126,8 +127,8 @@ def test_create_blob(self, **kwargs): resp = blob.create_page_blob(1024) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None assert blob.get_blob_properties() @BlobPreparer() @@ -138,41 +139,50 @@ def test_create_blob_with_immutability_policy(self, **kwargs): storage_resource_group_name = kwargs.pop("storage_resource_group_name") variables = kwargs.pop("variables", {}) - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), credential=versioned_storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + credential=versioned_storage_account_key, + max_page_size=4 * 1024, + ) self._setup(bsc) - container_name = self.get_resource_name('vlwcontainer') + container_name = self.get_resource_name("vlwcontainer") if self.is_live: token_credential = self.get_credential(BlobServiceClient) subscription_id = self.get_settings_value("SUBSCRIPTION_ID") - mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01') + mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01") property = mgmt_client.models().BlobContainer( - immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)) - mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property) + immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True) + ) + mgmt_client.blob_containers.create( + storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property + ) blob_name = self.get_resource_name("vlwblob") blob = bsc.get_blob_client(container_name, blob_name) # Act - expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5)) - immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time, - policy_mode=BlobImmutabilityPolicyMode.Unlocked) - resp = blob.create_page_blob(1024, immutability_policy=immutability_policy, - legal_hold=True) + expiry_time = 
self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5)) + immutability_policy = ImmutabilityPolicy( + expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked + ) + resp = blob.create_page_blob(1024, immutability_policy=immutability_policy, legal_hold=True) props = blob.get_blob_properties() # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert props['has_legal_hold'] - assert props['immutability_policy']['expiry_time'] is not None - assert props['immutability_policy']['policy_mode'] is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert props["has_legal_hold"] + assert props["immutability_policy"]["expiry_time"] is not None + assert props["immutability_policy"]["policy_mode"] is not None if self.is_live: blob.delete_immutability_policy() blob.set_legal_hold(False) blob.delete_blob() - mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name) + mgmt_client.blob_containers.delete( + storage_resource_group_name, versioned_storage_account_name, container_name + ) return variables @@ -182,7 +192,11 @@ def test_create_page_blob_returns_vid(self, **kwargs): versioned_storage_account_name = kwargs.pop("versioned_storage_account_name") versioned_storage_account_key = kwargs.pop("versioned_storage_account_key") - bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), credential=versioned_storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(versioned_storage_account_name, "blob"), + credential=versioned_storage_account_key, + max_page_size=4 * 1024, + ) self._setup(bsc) blob = self._get_blob_reference(bsc) @@ -190,9 +204,9 @@ def test_create_page_blob_returns_vid(self, **kwargs): resp = blob.create_page_blob(1024) # Assert - assert resp['version_id'] is not None - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp["version_id"] is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None assert blob.get_blob_properties() @BlobPreparer() @@ -201,10 +215,12 @@ def test_create_blob_with_metadata(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) - metadata = {'hello': 'world', 'number': '42'} + metadata = {"hello": "world", "number": "42"} # Act resp = blob.create_page_blob(512, metadata=metadata) @@ -219,10 +235,12 @@ def test_put_page_with_lease_id(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444') + lease = blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444") # Act data = 
self.get_random_bytes(512) @@ -238,19 +256,32 @@ def test_put_page_with_lease_id_and_if_tags(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"} blob = self._create_blob(bsc, tags=tags) with pytest.raises(ResourceModifiedError): - blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', if_tags_match_condition="\"tag1\"='first tag'") - lease = blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") + blob.acquire_lease( + lease_id="00000000-1111-2222-3333-444444444444", if_tags_match_condition="\"tag1\"='first tag'" + ) + lease = blob.acquire_lease( + lease_id="00000000-1111-2222-3333-444444444444", + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", + ) # Act data = self.get_random_bytes(512) with pytest.raises(ResourceModifiedError): blob.upload_page(data, offset=0, length=512, lease=lease, if_tags_match_condition="\"tag1\"='first tag'") - blob.upload_page(data, offset=0, length=512, lease=lease, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'") + blob.upload_page( + data, + offset=0, + length=512, + lease=lease, + if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'", + ) page_ranges, cleared = blob.get_page_ranges() @@ -265,7 +296,9 @@ def test_update_page(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -274,9 +307,9 @@ def test_update_page(self, **kwargs): resp = blob.upload_page(data, offset=0, length=512) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert resp.get('blob_sequence_number') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert resp.get("blob_sequence_number") is not None self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc) @BlobPreparer() @@ -285,7 +318,9 @@ def test_create_8tb_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) @@ -295,8 +330,8 @@ def test_create_8tb_blob(self, **kwargs): page_ranges, cleared = blob.get_page_ranges() # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None assert isinstance(props, BlobProperties) assert props.size == 
EIGHT_TB assert 0 == len(page_ranges) @@ -307,7 +342,9 @@ def test_create_larger_than_8tb_blob_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) @@ -321,7 +358,9 @@ def test_update_8tb_blob_page(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) blob.create_page_blob(EIGHT_TB) @@ -335,14 +374,14 @@ def test_update_8tb_blob_page(self, **kwargs): page_ranges, cleared = blob.get_page_ranges() # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert resp.get('blob_sequence_number') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert resp.get("blob_sequence_number") is not None self.assertRangeEqual(self.container_name, blob.blob_name, data, start_offset, length, bsc) assert props.size == EIGHT_TB assert 1 == len(page_ranges) - assert page_ranges[0]['start'] == start_offset - assert page_ranges[0]['end'] == start_offset + length - 1 + assert page_ranges[0]["start"] == start_offset + assert page_ranges[0]["end"] == start_offset + length - 1 @BlobPreparer() @recorded_by_proxy @@ -350,7 +389,9 @@ def test_update_page_with_md5(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -366,7 +407,9 @@ def test_clear_page(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) @@ -374,10 +417,10 @@ def test_clear_page(self, **kwargs): resp = blob.clear_page(offset=0, length=512) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert resp.get('blob_sequence_number') is not None - self.assertBlobEqual(self.container_name, blob.blob_name, b'\x00' * 512, bsc) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert resp.get("blob_sequence_number") is not None + self.assertBlobEqual(self.container_name, blob.blob_name, b"\x00" * 512, bsc) @BlobPreparer() @recorded_by_proxy @@ -385,7 +428,9 @@ def test_put_page_if_sequence_number_lt_success(self, **kwargs): 
storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -405,7 +450,9 @@ def test_update_page_if_sequence_number_lt_failure(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -424,7 +471,9 @@ def test_update_page_if_sequence_number_lte_success(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -443,7 +492,9 @@ def test_update_page_if_sequence_number_lte_failure(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -462,7 +513,9 @@ def test_update_page_if_sequence_number_eq_success(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -481,7 +534,9 @@ def test_update_page_if_sequence_number_eq_failure(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -498,17 +553,19 @@ def test_update_page_unicode(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 
* 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) blob = self._create_blob(bsc) # Act - data = u'abcdefghijklmnop' * 32 + data = "abcdefghijklmnop" * 32 resp = blob.upload_page(data, offset=0, length=512) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None @BlobPreparer() @recorded_by_proxy @@ -516,12 +573,15 @@ def test_upload_pages_from_url(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) source_blob_client_with_special_chars = self._create_source_blob_with_special_chars( - bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) + bsc, source_blob_data, 0, SOURCE_BLOB_SIZE + ) sas = self.generate_sas( generate_blob_sas, @@ -531,7 +591,8 @@ def test_upload_pages_from_url(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) sas_token_for_blob_with_special_chars = self.generate_sas( generate_blob_sas, @@ -541,41 +602,47 @@ def test_upload_pages_from_url(self, **kwargs): snapshot=source_blob_client_with_special_chars.snapshot, account_key=source_blob_client_with_special_chars.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls resp = destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, offset=0, length=4 * 1024, source_offset=0) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" + sas, offset=0, length=4 * 1024, source_offset=0 + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None resp = destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, offset=4 * 1024, - length=4 * 1024, source_offset=4 * 1024) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" 
+ sas, offset=4 * 1024, length=4 * 1024, source_offset=4 * 1024 + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() assert blob_properties.size == SOURCE_BLOB_SIZE self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act: make update page from url calls source_with_special_chars_resp = destination_blob_client.upload_pages_from_url( - source_blob_client_with_special_chars.url + "?" + sas_token_for_blob_with_special_chars, offset=0, length=4 * 1024, source_offset=0) - assert source_with_special_chars_resp.get('etag') is not None - assert source_with_special_chars_resp.get('last_modified') is not None + source_blob_client_with_special_chars.url + "?" + sas_token_for_blob_with_special_chars, + offset=0, + length=4 * 1024, + source_offset=0, + ) + assert source_with_special_chars_resp.get("etag") is not None + assert source_with_special_chars_resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() assert blob_properties.size == SOURCE_BLOB_SIZE self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == source_with_special_chars_resp.get('etag') - assert blob_properties.get('last_modified') == source_with_special_chars_resp.get('last_modified') + assert blob_properties.get("etag") == source_with_special_chars_resp.get("etag") + assert blob_properties.get("last_modified") == source_with_special_chars_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy @@ -583,9 +650,13 @@ def test_upload_pages_from_url_with_oauth(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) - token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token) + token = "Bearer {}".format( + self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token + ) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) @@ -593,10 +664,12 @@ def test_upload_pages_from_url_with_oauth(self, **kwargs): # Assert failure without providing token with pytest.raises(HttpResponseError): destination_blob_client.upload_pages_from_url( - source_blob_client.url, offset=0, length=8 * 1024, source_offset=0) + source_blob_client.url, offset=0, length=8 * 1024, source_offset=0 + ) # Assert it works with oauth token destination_blob_client.upload_pages_from_url( - source_blob_client.url, offset=0, length=8 * 1024, source_offset=0, source_authorization=token) + source_blob_client.url, offset=0, length=8 
* 1024, source_offset=0, source_authorization=token + ) destination_blob_data = destination_blob_client.download_blob().readall() assert source_blob_data == destination_blob_data @@ -607,7 +680,9 @@ def test_upload_pages_from_url_and_validate_content_md5(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -620,33 +695,37 @@ def test_upload_pages_from_url_and_validate_content_md5(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_content_md5=src_md5) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_content_md5=src_md5, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_content_md5=StorageContentValidation.get_content_md5( - b"POTATO")) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_content_md5=StorageContentValidation.get_content_md5(b"POTATO"), + ) @BlobPreparer() @recorded_by_proxy @@ -655,7 +734,9 @@ def test_upload_pages_from_url_with_source_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -668,35 +749,37 @@ def test_upload_pages_from_url_with_source_if_modified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_if_modified_since=source_properties.get('last_modified') - timedelta( - hours=15)) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_if_modified_since=source_properties.get("last_modified") - timedelta(hours=15), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_if_modified_since=source_properties.get( - 'last_modified')) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_if_modified_since=source_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy @@ -705,7 +788,9 @@ def test_upload_pages_from_url_with_source_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -718,34 +803,37 @@ def test_upload_pages_from_url_with_source_if_unmodified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_if_unmodified_since=source_properties.get('last_modified')) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_if_unmodified_since=source_properties.get("last_modified"), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_if_unmodified_since=source_properties.get('last_modified') - timedelta( - hours=15)) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_if_unmodified_since=source_properties.get("last_modified") - timedelta(hours=15), + ) @BlobPreparer() @recorded_by_proxy @@ -754,7 +842,9 @@ def test_upload_pages_from_url_with_source_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -767,35 +857,39 @@ def test_upload_pages_from_url_with_source_if_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_etag=source_properties.get('etag'), - source_match_condition=MatchConditions.IfNotModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfNotModified) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy @@ -804,7 +898,9 @@ def test_upload_pages_from_url_with_source_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -817,35 +913,39 @@ def test_upload_pages_from_url_with_source_if_none_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - source_etag=source_properties.get('etag'), - source_match_condition=MatchConditions.IfModified) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy @@ -854,7 +954,9 @@ def test_upload_pages_from_url_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -867,34 +969,37 @@ def test_upload_pages_from_url_with_if_modified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - if_modified_since=source_properties.get('last_modified') - timedelta( - minutes=15)) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + if_modified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - if_modified_since=blob_properties.get('last_modified')) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + if_modified_since=blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy @@ -903,7 +1008,9 @@ def test_upload_pages_from_url_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -916,35 +1023,38 @@ def test_upload_pages_from_url_with_if_unmodified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) destination_blob_properties = destination_blob_client.get_blob_properties() # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - offset=0, - length=SOURCE_BLOB_SIZE, - source_offset=0, - if_unmodified_since=destination_blob_properties.get('last_modified')) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + offset=0, + length=SOURCE_BLOB_SIZE, + source_offset=0, + if_unmodified_since=destination_blob_properties.get("last_modified"), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(ResourceModifiedError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_unmodified_since=source_properties.get('last_modified') - timedelta( - minutes=15)) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + if_unmodified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) @BlobPreparer() @recorded_by_proxy @@ -953,7 +1063,9 @@ def test_upload_pages_from_url_with_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -965,31 +1077,40 @@ def test_upload_pages_from_url_with_if_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) destination_blob_properties = destination_blob_client.get_blob_properties() # Act: make update page from url calls resp = destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - etag=destination_blob_properties.get('etag'), - match_condition=MatchConditions.IfNotModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag=destination_blob_properties.get("etag"), + match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - etag='0x111111111111111', - match_condition=MatchConditions.IfNotModified) + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy @@ -998,7 +1119,9 @@ def test_upload_pages_from_url_with_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -1010,36 +1133,40 @@ def test_upload_pages_from_url_with_if_none_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - etag='0x111111111111111', - match_condition=MatchConditions.IfModified) + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag="0x111111111111111", + match_condition=MatchConditions.IfModified, + ) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - etag=blob_properties.get('etag'), - match_condition=MatchConditions.IfModified) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag=blob_properties.get("etag"), + match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy @@ -1048,7 +1175,9 @@ def test_upload_pages_from_url_with_sequence_number_lt(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) start_sequence = 10 source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) @@ -1061,33 +1190,29 @@ def test_upload_pages_from_url_with_sequence_number_lt(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE, sequence_number=start_sequence) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lt=start_sequence + 1) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lt=start_sequence + 1 + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lt=start_sequence) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lt=start_sequence + ) @BlobPreparer() @recorded_by_proxy @@ -1096,7 +1221,9 @@ def test_upload_pages_from_url_with_sequence_number_lte(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) self._setup(bsc) start_sequence = 10 source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) @@ -1109,33 +1236,29 @@ def test_upload_pages_from_url_with_sequence_number_lte(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE, sequence_number=start_sequence) # Act: make update page from url calls - resp = destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lte=start_sequence) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lte=start_sequence + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = destination_blob_client.get_blob_properties() self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with failing condition with pytest.raises(HttpResponseError): - destination_blob_client \ - .upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lte=start_sequence - 1) + destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lte=start_sequence - 1
+            )
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1144,7 +1267,9 @@ def test_upload_pages_from_url_with_sequence_number_eq(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
 
         # Arrange
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         start_sequence = 10
         source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
@@ -1157,33 +1282,29 @@ def test_upload_pages_from_url_with_sequence_number_eq(self, **kwargs):
             snapshot=source_blob_client.snapshot,
             account_key=source_blob_client.credential.account_key,
             permission=BlobSasPermissions(read=True, delete=True),
-            expiry=datetime.utcnow() + timedelta(hours=1))
+            expiry=datetime.utcnow() + timedelta(hours=1),
+        )
 
         destination_blob_client = self._create_blob(bsc, length=SOURCE_BLOB_SIZE, sequence_number=start_sequence)
 
         # Act: make update page from url calls
-        resp = destination_blob_client \
-            .upload_pages_from_url(source_blob_client.url + "?" + sas,
-                                   0,
-                                   SOURCE_BLOB_SIZE,
-                                   0,
-                                   if_sequence_number_eq=start_sequence)
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        resp = destination_blob_client.upload_pages_from_url(
+            source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_eq=start_sequence
+        )
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
 
         # Assert the destination blob is constructed correctly
         blob_properties = destination_blob_client.get_blob_properties()
         self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc)
-        assert blob_properties.get('etag') == resp.get('etag')
-        assert blob_properties.get('last_modified') == resp.get('last_modified')
+        assert blob_properties.get("etag") == resp.get("etag")
+        assert blob_properties.get("last_modified") == resp.get("last_modified")
 
         # Act part 2: put block from url with failing condition
         with pytest.raises(HttpResponseError):
-            destination_blob_client \
-                .upload_pages_from_url(source_blob_client.url + "?" + sas, 0,
-                                       SOURCE_BLOB_SIZE,
-                                       0,
-                                       if_sequence_number_eq=start_sequence + 1)
+            destination_blob_client.upload_pages_from_url(
+                source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_eq=start_sequence + 1
+            )
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1196,7 +1317,7 @@ def test_list_page_ranges(self, **kwargs):
         blob = self._create_blob(bsc, length=2560)
         data = self.get_random_bytes(512)
         blob.upload_page(data, offset=0, length=512)
-        blob.upload_page(data*2, offset=1024, length=1024)
+        blob.upload_page(data * 2, offset=1024, length=1024)
 
         # Act
         ranges = list(blob.list_page_ranges())
@@ -1299,7 +1420,7 @@ def test_list_page_ranges_diff(self, **kwargs):
 
         # Act
         ranges1 = list(blob.list_page_ranges(previous_snapshot=snapshot1))
-        ranges2 = list(blob.list_page_ranges(previous_snapshot=snapshot2['snapshot']))
+        ranges2 = list(blob.list_page_ranges(previous_snapshot=snapshot2["snapshot"]))
 
         # Assert
         assert ranges1 is not None
@@ -1353,7 +1474,9 @@ def test_get_page_ranges_no_pages(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc)
 
@@ -1371,7 +1494,9 @@ def test_get_page_ranges_2_pages(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc, length=2048)
         data = self.get_random_bytes(512)
@@ -1385,10 +1510,10 @@ def test_get_page_ranges_2_pages(self, **kwargs):
         assert ranges is not None
         assert isinstance(ranges, list)
         assert len(ranges) == 2
-        assert ranges[0]['start'] == 0
-        assert ranges[0]['end'] == 511
-        assert ranges[1]['start'] == 1024
-        assert ranges[1]['end'] == 1535
+        assert ranges[0]["start"] == 0
+        assert ranges[0]["end"] == 511
+        assert ranges[1]["start"] == 1024
+        assert ranges[1]["end"] == 1535
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1396,7 +1521,9 @@ def test_get_page_ranges_diff(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc, length=2048)
         data = self.get_random_bytes(1536)
@@ -1407,7 +1534,7 @@ def test_get_page_ranges_diff(self, **kwargs):
 
         # Act
         ranges1, cleared1 = blob.get_page_ranges(previous_snapshot_diff=snapshot1)
-        ranges2, cleared2 = blob.get_page_ranges(previous_snapshot_diff=snapshot2['snapshot'])
+        ranges2, cleared2 = blob.get_page_ranges(previous_snapshot_diff=snapshot2["snapshot"])
 
         # Assert
         assert ranges1 is not None
@@ -1415,20 +1542,20 @@ def test_get_page_ranges_diff(self, **kwargs):
         assert len(ranges1) == 2
         assert isinstance(cleared1, list)
         assert len(cleared1) == 1
-        assert ranges1[0]['start'] == 0
-        assert ranges1[0]['end'] == 511
-        assert cleared1[0]['start'] == 512
-        assert cleared1[0]['end'] == 1023
-        assert ranges1[1]['start'] == 1024
-        assert ranges1[1]['end'] == 1535
+        assert ranges1[0]["start"] == 0
+        assert ranges1[0]["end"] == 511
+        assert cleared1[0]["start"] == 512
+        assert cleared1[0]["end"] == 1023
+        assert ranges1[1]["start"] == 1024
+        assert ranges1[1]["end"] == 1535
 
         assert ranges2 is not None
         assert isinstance(ranges2, list)
         assert len(ranges2) == 0
         assert isinstance(cleared2, list)
         assert len(cleared2) == 1
-        assert cleared2[0]['start'] == 512
-        assert cleared2[0]['end'] == 1023
+        assert cleared2[0]["start"] == 512
+        assert cleared2[0]["end"] == 1023
 
     @pytest.mark.playback_test_only
     @BlobPreparer()
@@ -1448,7 +1575,7 @@ def test_get_page_range_diff_for_managed_disk(self, **kwargs):
         data = self.get_random_bytes(1536)
 
         snapshot1 = blob.create_snapshot()
-        snapshot_blob1 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot1['snapshot'])
+        snapshot_blob1 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot1["snapshot"])
         sas_token1 = self.generate_sas(
             generate_blob_sas,
             snapshot_blob1.account_name,
@@ -1462,7 +1589,7 @@ def test_get_page_range_diff_for_managed_disk(self, **kwargs):
         blob.upload_page(data, offset=0, length=1536)
 
         snapshot2 = blob.create_snapshot()
-        snapshot_blob2 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot2['snapshot'])
+        snapshot_blob2 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot2["snapshot"])
         sas_token2 = self.generate_sas(
             generate_blob_sas,
             snapshot_blob2.account_name,
@@ -1477,8 +1604,8 @@ def test_get_page_range_diff_for_managed_disk(self, **kwargs):
         blob.clear_page(offset=512, length=512)
 
         # Act
-        ranges1, cleared1 = blob.get_page_range_diff_for_managed_disk(snapshot_blob1.url + '&' + sas_token1)
-        ranges2, cleared2 = blob.get_page_range_diff_for_managed_disk(snapshot_blob2.url + '&' + sas_token2)
+        ranges1, cleared1 = blob.get_page_range_diff_for_managed_disk(snapshot_blob1.url + "&" + sas_token1)
+        ranges2, cleared2 = blob.get_page_range_diff_for_managed_disk(snapshot_blob2.url + "&" + sas_token2)
 
         # Assert
         assert ranges1 is not None
@@ -1486,20 +1613,20 @@ def test_get_page_range_diff_for_managed_disk(self, **kwargs):
         assert len(ranges1) == 2
         assert isinstance(cleared1, list)
         assert len(cleared1) == 1
-        assert ranges1[0]['start'] == 0
-        assert ranges1[0]['end'] == 511
-        assert cleared1[0]['start'] == 512
-        assert cleared1[0]['end'] == 1023
-        assert ranges1[1]['start'] == 1024
-        assert ranges1[1]['end'] == 1535
+        assert ranges1[0]["start"] == 0
+        assert ranges1[0]["end"] == 511
+        assert cleared1[0]["start"] == 512
+        assert cleared1[0]["end"] == 1023
+        assert ranges1[1]["start"] == 1024
+        assert ranges1[1]["end"] == 1535
 
         assert ranges2 is not None
         assert isinstance(ranges2, list)
         assert len(ranges2) == 0
         assert isinstance(cleared2, list)
         assert len(cleared2) == 1
-        assert cleared2[0]['start'] == 512
-        assert cleared2[0]['end'] == 1023
+        assert cleared2[0]["start"] == 512
+        assert cleared2[0]["end"] == 1023
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1507,7 +1634,9 @@ def test_update_page_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc, length=2048)
         data = self.get_random_bytes(512)
@@ -1523,7 +1652,9 @@ def test_resize_blob(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc, length=1024)
 
@@ -1531,9 +1662,9 @@ def test_resize_blob(self, **kwargs):
         resp = blob.resize_blob(512)
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert resp.get('blob_sequence_number') is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert resp.get("blob_sequence_number") is not None
         props = blob.get_blob_properties()
         assert isinstance(props, BlobProperties)
         assert props.size == 512
@@ -1544,17 +1675,19 @@ def test_set_sequence_number_blob(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._create_blob(bsc)
 
         # Act
         resp = blob.set_sequence_number(SequenceNumberAction.Update, 6)
 
-        #Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert resp.get('blob_sequence_number') is not None
+        # Assert
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert resp.get("blob_sequence_number") is not None
         props = blob.get_blob_properties()
         assert isinstance(props, BlobProperties)
         assert props.page_blob_sequence_number == 6
@@ -1565,33 +1698,29 @@ def test_create_page_blob_with_no_overwrite(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
-        data1 = b'1234' * 128
-        data2 = b'1234' * 256
+        data1 = b"1234" * 128
+        data2 = b"1234" * 256
 
         # Act
         create_resp = blob.upload_blob(
-            data1,
-            overwrite=True,
-            blob_type=BlobType.PageBlob,
-            metadata={'blobdata': 'data1'})
+            data1, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data1"}
+        )
 
         with pytest.raises(ResourceExistsError):
-            blob.upload_blob(
-                data2,
-                overwrite=False,
-                blob_type=BlobType.PageBlob,
-                metadata={'blobdata': 'data2'})
+            blob.upload_blob(data2, overwrite=False, blob_type=BlobType.PageBlob, metadata={"blobdata": "data2"})
 
         props = blob.get_blob_properties()
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data1, bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
-        assert props.metadata == {'blobdata': 'data1'}
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
+        assert props.metadata == {"blobdata": "data1"}
         assert props.size == len(data1)
         assert props.blob_type == BlobType.PageBlob
@@ -1601,31 +1730,29 @@ def test_create_page_blob_with_overwrite(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
        self._setup(bsc)
         blob = self._get_blob_reference(bsc)
-        data1 = b'1234' * 128
-        data2 = b'1234' * 256
+        data1 = b"1234" * 128
+        data2 = b"1234" * 256
 
         # Act
         create_resp = blob.upload_blob(
-            data1,
-            overwrite=True,
-            blob_type=BlobType.PageBlob,
-            metadata={'blobdata': 'data1'})
+            data1, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data1"}
+        )
         update_resp = blob.upload_blob(
-            data2,
-            overwrite=True,
-            blob_type=BlobType.PageBlob,
-            metadata={'blobdata': 'data2'})
+            data2, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data2"}
+        )
 
         props = blob.get_blob_properties()
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data2, bsc)
-        assert props.etag == update_resp.get('etag')
-        assert props.last_modified == update_resp.get('last_modified')
-        assert props.metadata == {'blobdata': 'data2'}
+        assert props.etag == update_resp.get("etag")
+        assert props.last_modified == update_resp.get("last_modified")
+        assert props.metadata == {"blobdata": "data2"}
         assert props.size == len(data2)
         assert props.blob_type == BlobType.PageBlob
@@ -1635,7 +1762,9 @@ def test_create_blob_from_bytes(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1646,8 +1775,8 @@ def test_create_blob_from_bytes(self, **kwargs):
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1655,7 +1784,9 @@ def test_create_blob_from_0_bytes(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(0)
@@ -1666,8 +1797,8 @@ def test_create_blob_from_0_bytes(self, **kwargs):
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1675,27 +1806,29 @@ def test_create_blob_from_bytes_with_progress_first(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
 
         # Act
         progress = []
+
         def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))
 
-        create_resp = blob.upload_blob(
-            data, blob_type=BlobType.PageBlob, raw_response_hook=callback)
+        create_resp = blob.upload_blob(data, blob_type=BlobType.PageBlob, raw_response_hook=callback)
         props = blob.get_blob_properties()
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
         self.assert_upload_progress(LARGE_BLOB_SIZE, self.config.max_page_size, progress)
 
     @BlobPreparer()
@@ -1704,7 +1837,9 @@ def test_create_blob_from_bytes_with_index(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1722,7 +1857,9 @@ def test_create_blob_from_bytes_with_index_and_count(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1734,9 +1871,9 @@ def test_create_blob_from_bytes_with_index_and_count(self, **kwargs):
         props = blob.get_blob_properties()
 
         # Assert
-        self.assertBlobEqual(self.container_name, blob.blob_name, data[index:index + count], bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        self.assertBlobEqual(self.container_name, blob.blob_name, data[index : index + count], bsc)
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1744,7 +1881,9 @@ def test_create_blob_from_path(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1758,8 +1897,8 @@ def test_create_blob_from_path(self, **kwargs):
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1767,16 +1906,19 @@ def test_create_blob_from_path_with_progress(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
 
         # Act
         progress = []
+
         def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))
 
@@ -1795,7 +1937,9 @@ def test_create_blob_from_stream(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1810,8 +1954,8 @@ def test_create_blob_from_stream(self, **kwargs):
 
         # Assert
         self.assertBlobEqual(self.container_name, blob.blob_name, data[:blob_size], bsc)
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1819,13 +1963,15 @@ def test_create_blob_from_stream_with_empty_pages(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         # data is almost all empty (0s) except two ranges
         blob = self._get_blob_reference(bsc)
         data = bytearray(16 * 1024)
-        data[512: 1024] = self.get_random_bytes(512)
-        data[8192: 8196] = self.get_random_bytes(4)
+        data[512:1024] = self.get_random_bytes(512)
+        data[8192:8196] = self.get_random_bytes(4)
 
         # Act
         blob_size = len(data)
@@ -1840,12 +1986,12 @@ def test_create_blob_from_stream_with_empty_pages(self, **kwargs):
         self.assertBlobEqual(self.container_name, blob.blob_name, data[:blob_size], bsc)
         page_ranges, cleared = list(blob.get_page_ranges())
         assert len(page_ranges) == 2
-        assert page_ranges[0]['start'] == 0
-        assert page_ranges[0]['end'] == 4095
-        assert page_ranges[1]['start'] == 8192
-        assert page_ranges[1]['end'] == 12287
-        assert props.etag == create_resp.get('etag')
-        assert props.last_modified == create_resp.get('last_modified')
+        assert page_ranges[0]["start"] == 0
+        assert page_ranges[0]["end"] == 4095
+        assert page_ranges[1]["start"] == 8192
+        assert page_ranges[1]["end"] == 12287
+        assert props.etag == create_resp.get("etag")
+        assert props.last_modified == create_resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy
@@ -1853,7 +1999,9 @@ def test_create_blob_from_stream_non_seekable(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
        storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1875,16 +2023,19 @@ def test_create_blob_from_stream_with_progress(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
 
         # Act
         progress = []
+
        def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))
 
@@ -1904,7 +2055,9 @@ def test_create_blob_from_stream_truncated(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1925,16 +2078,19 @@ def test_create_blob_from_stream_with_progress_truncated(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
 
         # Act
         progress = []
+
         def callback(response):
-            current = response.context['upload_stream_current']
-            total = response.context['data_stream_total']
+            current = response.context["upload_stream_current"]
+            total = response.context["data_stream_total"]
             if current is not None:
                 progress.append((current, total))
 
@@ -1954,7 +2110,9 @@ def test_create_blob_with_md5_small(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -1970,7 +2128,9 @@ def test_create_blob_with_md5_large(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(LARGE_BLOB_SIZE)
@@ -1986,7 +2146,9 @@ def test_incremental_copy_blob(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         source_blob = self._create_blob(bsc, length=2048)
         data = self.get_random_bytes(512)
@@ -1995,7 +2157,8 @@ def test_incremental_copy_blob(self, **kwargs):
         source_snapshot_blob = source_blob.create_snapshot()
 
         snapshot_blob = BlobClient.from_blob_url(
-            source_blob.url, credential=source_blob.credential, snapshot=source_snapshot_blob)
+            source_blob.url, credential=source_blob.credential, snapshot=source_snapshot_blob
+        )
         sas_token = self.generate_sas(
             generate_blob_sas,
             snapshot_blob.account_name,
@@ -2009,16 +2172,16 @@ def test_incremental_copy_blob(self, **kwargs):
         sas_blob = BlobClient.from_blob_url(snapshot_blob.url, credential=sas_token)
 
         # Act
-        dest_blob = bsc.get_blob_client(self.container_name, 'dest_blob')
+        dest_blob = bsc.get_blob_client(self.container_name, "dest_blob")
         copy = dest_blob.start_copy_from_url(sas_blob.url, incremental_copy=True)
 
         # Assert
         assert copy is not None
-        assert copy['copy_id'] is not None
-        assert copy['copy_status'] == 'pending'
+        assert copy["copy_id"] is not None
+        assert copy["copy_status"] == "pending"
 
         copy_blob = self._wait_for_async_copy(dest_blob)
-        assert copy_blob.copy.status == 'success'
+        assert copy_blob.copy.status == "success"
         assert copy_blob.copy.destination_snapshot is not None
 
         # strip off protocol
@@ -2030,14 +2193,18 @@ def test_blob_tier_on_create(self, **kwargs):
         premium_storage_account_name = kwargs.pop("premium_storage_account_name")
         premium_storage_account_key = kwargs.pop("premium_storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(premium_storage_account_name, "blob"),
+            credential=premium_storage_account_key,
+            max_page_size=4 * 1024,
+        )
         self._setup(bsc)
         url = self.account_url(premium_storage_account_name, "blob")
         credential = premium_storage_account_key
         pbs = BlobServiceClient(url, credential=credential)
 
         try:
-            container_name = self.get_resource_name('utpremiumcontainer')
+            container_name = self.get_resource_name("utpremiumcontainer")
             container = pbs.get_container_client(container_name)
 
             if self.is_live:
                 container.create_container()
@@ -2056,10 +2223,8 @@ def test_blob_tier_on_create(self, **kwargs):
             pblob2 = pbs.get_blob_client(container_name, blob2.blob_name)
             byte_data = self.get_random_bytes(1024)
             pblob2.upload_blob(
-                byte_data,
-                premium_page_blob_tier=PremiumPageBlobTier.P6,
-                blob_type=BlobType.PageBlob,
-                overwrite=True)
+                byte_data, premium_page_blob_tier=PremiumPageBlobTier.P6, blob_type=BlobType.PageBlob, overwrite=True
+            )
 
             props2 = pblob2.get_blob_properties()
             assert props2.blob_tier == PremiumPageBlobTier.P6
@@ -2071,7 +2236,12 @@ def test_blob_tier_on_create(self, **kwargs):
             with tempfile.TemporaryFile() as temp_file:
                 temp_file.write(byte_data)
                 temp_file.seek(0)
-                pblob3.upload_blob(temp_file, blob_type=BlobType.PageBlob, premium_page_blob_tier=PremiumPageBlobTier.P10, overwrite=True)
+                pblob3.upload_blob(
+                    temp_file,
+                    blob_type=BlobType.PageBlob,
+                    premium_page_blob_tier=PremiumPageBlobTier.P10,
+                    overwrite=True,
+                )
 
             props3 = pblob3.get_blob_properties()
             assert props3.blob_tier == PremiumPageBlobTier.P10
@@ -2086,14 +2256,18 @@ def test_blob_tier_set_tier_api(self, **kwargs):
         premium_storage_account_name = kwargs.pop("premium_storage_account_name")
         premium_storage_account_key = kwargs.pop("premium_storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(premium_storage_account_name, "blob"),
+            credential=premium_storage_account_key,
+            max_page_size=4 * 1024,
+        )
         self._setup(bsc)
         url = self.account_url(premium_storage_account_name, "blob")
         credential = premium_storage_account_key
         pbs = BlobServiceClient(url, credential=credential)
 
         try:
-            container_name = self.get_resource_name('utpremiumcontainer')
+            container_name = self.get_resource_name("utpremiumcontainer")
             container = pbs.get_container_client(container_name)
 
             if self.is_live:
@@ -2148,7 +2322,7 @@ def test_blob_tier_copy_blob(self, **kwargs):
         pbs = BlobServiceClient(url, credential=credential)
 
         try:
-            container_name = self.get_resource_name('utpremiumcontainer')
+            container_name = self.get_resource_name("utpremiumcontainer")
             container = pbs.get_container_client(container_name)
 
             if self.is_live:
@@ -2157,49 +2331,49 @@ def test_blob_tier_copy_blob(self, **kwargs):
                 except ResourceExistsError:
                     pass
 
-            source_blob = pbs.get_blob_client(
-                container_name,
-                self.get_resource_name(TEST_BLOB_PREFIX))
+            source_blob = pbs.get_blob_client(container_name, self.get_resource_name(TEST_BLOB_PREFIX))
             source_blob.create_page_blob(1024, premium_page_blob_tier=PremiumPageBlobTier.P10)
 
             # Act
-            source_blob_url = '{0}/{1}/{2}'.format(
-                self.account_url(premium_storage_account_name, "blob"), container_name, source_blob.blob_name)
+            source_blob_url = "{0}/{1}/{2}".format(
+                self.account_url(premium_storage_account_name, "blob"), container_name, source_blob.blob_name
+            )
 
-            copy_blob = pbs.get_blob_client(container_name, 'blob1copy')
+            copy_blob = pbs.get_blob_client(container_name, "blob1copy")
             copy = copy_blob.start_copy_from_url(source_blob_url, premium_page_blob_tier=PremiumPageBlobTier.P30)
 
             # Assert
             assert copy is not None
-            assert copy['copy_status'] == 'success'
-            assert copy['copy_id'] is not None
+            assert copy["copy_status"] == "success"
+            assert copy["copy_id"] is not None
 
             copy_ref = copy_blob.get_blob_properties()
             assert copy_ref.blob_tier == PremiumPageBlobTier.P30
 
-            source_blob2 = pbs.get_blob_client(
-                container_name,
-                self.get_resource_name(TEST_BLOB_PREFIX))
+            source_blob2 = pbs.get_blob_client(container_name, self.get_resource_name(TEST_BLOB_PREFIX))
 
             source_blob2.create_page_blob(1024)
-            source_blob2_url = '{0}/{1}/{2}'.format(
-                self.account_url(premium_storage_account_name, "blob"), source_blob2.container_name, source_blob2.blob_name)
+            source_blob2_url = "{0}/{1}/{2}".format(
+                self.account_url(premium_storage_account_name, "blob"),
+                source_blob2.container_name,
+                source_blob2.blob_name,
+            )
 
-            copy_blob2 = pbs.get_blob_client(container_name, 'blob2copy')
+            copy_blob2 = pbs.get_blob_client(container_name, "blob2copy")
             copy2 = copy_blob2.start_copy_from_url(source_blob2_url, premium_page_blob_tier=PremiumPageBlobTier.P60)
             assert copy2 is not None
-            assert copy2['copy_status'] == 'success'
-            assert copy2['copy_id'] is not None
+            assert copy2["copy_status"] == "success"
+            assert copy2["copy_id"] is not None
 
             copy_ref2 = copy_blob2.get_blob_properties()
             assert copy_ref2.blob_tier == PremiumPageBlobTier.P60
             assert not copy_ref2.blob_tier_inferred
 
-            copy_blob3 = pbs.get_blob_client(container_name, 'blob3copy')
+            copy_blob3 = pbs.get_blob_client(container_name, "blob3copy")
             copy3 = copy_blob3.start_copy_from_url(source_blob2_url)
             assert copy3 is not None
-            assert copy3['copy_status'] == 'success'
-            assert copy3['copy_id'] is not None
+            assert copy3["copy_status"] == "success"
+            assert copy3["copy_id"] is not None
 
             copy_ref3 = copy_blob3.get_blob_properties()
             assert copy_ref3.blob_tier == PremiumPageBlobTier.P10
@@ -2214,9 +2388,11 @@ def test_download_sparse_page_blob_non_parallel(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
 
         # Arrange
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
-        self.config.max_single_get_size = 4*1024
+        self.config.max_single_get_size = 4 * 1024
         self.config.max_chunk_get_size = 1024
 
         sparse_page_blob_size = 1024 * 1024
@@ -2225,24 +2401,24 @@ def test_download_sparse_page_blob_non_parallel(self, **kwargs):
 
         # Act
         page_ranges, cleared = blob_client.get_page_ranges()
-        start = page_ranges[0]['start']
-        end = page_ranges[0]['end']
+        start = page_ranges[0]["start"]
+        end = page_ranges[0]["end"]
 
         content = blob_client.download_blob().readall()
 
         # Assert
         assert sparse_page_blob_size == len(content)
         # make sure downloaded data is the same as the uploaded data
-        assert data == content[start: end + 1]
+        assert data == content[start : end + 1]
         # assert all unlisted ranges are empty
-        for byte in content[:start-1]:
+        for byte in content[: start - 1]:
             try:
-                assert byte == '\x00'
+                assert byte == "\x00"
             except:
                 assert byte == 0
-        for byte in content[end+1:]:
+        for byte in content[end + 1 :]:
             try:
-                assert byte == '\x00'
+                assert byte == "\x00"
             except:
                 assert byte == 0
 
@@ -2253,7 +2429,9 @@ def test_download_sparse_page_blob_parallel(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         self._setup(bsc)
         self.config.max_single_get_size = 4 * 1024
         self.config.max_chunk_get_size = 1024
@@ -2264,8 +2442,8 @@ def test_download_sparse_page_blob_parallel(self, **kwargs):
 
         # Act
         page_ranges, cleared = blob_client.get_page_ranges()
-        start = page_ranges[0]['start']
-        end = page_ranges[0]['end']
+        start = page_ranges[0]["start"]
+        end = page_ranges[0]["end"]
 
         content = blob_client.download_blob(max_concurrency=3).readall()
 
@@ -2287,7 +2465,7 @@ def test_download_sparse_page_blob_uneven_chunks(self, **kwargs):
         blob_client = self._get_blob_reference(bsc)
         blob_client.create_page_blob(sparse_page_blob_size)
 
-        data = b'12345678' * 128  # 1024 bytes
+        data = b"12345678" * 128  # 1024 bytes
         range_start = 2 * 1024 + 512
         blob_client.upload_page(data, offset=range_start, length=len(data))
 
@@ -2302,10 +2480,10 @@ def test_download_sparse_page_blob_uneven_chunks(self, **kwargs):
         start = r.start
         end = r.end
 
-        assert data == content[start: end + 1]
-        for byte in content[:start - 1]:
+        assert data == content[start : end + 1]
+        for byte in content[: start - 1]:
             assert byte == 0
-        for byte in content[end + 1:]:
+        for byte in content[end + 1 :]:
             assert byte == 0
 
     @BlobPreparer()
@@ -2318,23 +2496,23 @@ def test_upload_progress_chunked_non_parallel(self, **kwargs):
         self._setup(bsc)
 
         blob_name = self.get_resource_name(TEST_BLOB_PREFIX)
-        data = b'a' * 5 * 1024
+        data = b"a" * 5 * 1024
 
         progress = ProgressTracker(len(data), 1024)
 
         # Act
         blob_client = BlobClient(
-            self.account_url(storage_account_name, 'blob'),
-            self.container_name, blob_name,
+            self.account_url(storage_account_name, "blob"),
+            self.container_name,
+            blob_name,
             credential=storage_account_key,
-            max_single_put_size=1024, max_page_size=1024)
+            max_single_put_size=1024,
+            max_page_size=1024,
+        )
 
         blob_client.upload_blob(
-            data,
-            blob_type=BlobType.PageBlob,
-            overwrite=True,
-            max_concurrency=1,
-            progress_hook=progress.assert_progress)
+            data, blob_type=BlobType.PageBlob, overwrite=True, max_concurrency=1, progress_hook=progress.assert_progress
+        )
 
         # Assert
         progress.assert_complete()
@@ -2350,23 +2528,23 @@ def test_upload_progress_chunked_parallel(self, **kwargs):
         self._setup(bsc)
 
         blob_name = self.get_resource_name(TEST_BLOB_PREFIX)
-        data = b'a' * 5 * 1024
+        data = b"a" * 5 * 1024
 
         progress = ProgressTracker(len(data), 1024)
 
         # Act
         blob_client = BlobClient(
-            self.account_url(storage_account_name, 'blob'),
-            self.container_name, blob_name,
+            self.account_url(storage_account_name, "blob"),
+            self.container_name,
+            blob_name,
            credential=storage_account_key,
-            max_single_put_size=1024, max_page_size=1024)
+            max_single_put_size=1024,
+            max_page_size=1024,
+        )
 
         blob_client.upload_blob(
-            data,
-            blob_type=BlobType.PageBlob,
-            overwrite=True,
-            max_concurrency=3,
-            progress_hook=progress.assert_progress)
+            data, blob_type=BlobType.PageBlob, overwrite=True, max_concurrency=3, progress_hook=progress.assert_progress
+        )
 
         # Assert
         progress.assert_complete()
diff --git a/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py
index d280130a508a..e091720f75a5 100644
--- a/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py
+++ b/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py
@@ -21,7 +21,8 @@
     ImmutabilityPolicy,
     PremiumPageBlobTier,
     SequenceNumberAction,
-    generate_blob_sas)
+    generate_blob_sas,
+)
 from azure.storage.blob.aio import BlobClient, BlobServiceClient
 from azure.storage.blob._shared.policies import StorageContentValidation
 
@@ -32,7 +33,7 @@
 # ------------------------------------------------------------------------------
-TEST_BLOB_PREFIX = 'blob'
+TEST_BLOB_PREFIX = "blob"
 LARGE_BLOB_SIZE = 10 * 1024 + 512
 EIGHT_TB = 8 * 1024 * 1024 * 1024 * 1024
 SOURCE_BLOB_SIZE = 8 * 1024
@@ -44,8 +45,8 @@ class TestStoragePageBlobAsync(AsyncStorageRecordedTestCase):
 
     async def _setup(self, bsc):
         self.config = bsc._config
-        self.container_name = self.get_resource_name('utcontainer')
-        self.source_container_name = self.get_resource_name('utcontainersource')
+        self.container_name = self.get_resource_name("utcontainer")
+        self.source_container_name = self.get_resource_name("utcontainersource")
         if self.is_live:
             try:
                 await bsc.create_container(self.container_name)
@@ -57,9 +58,7 @@ async def _setup(self, bsc):
                 pass
 
     def _get_blob_reference(self, bsc) -> BlobClient:
-        return bsc.get_blob_client(
-            self.container_name,
-            self.get_resource_name(TEST_BLOB_PREFIX))
+        return bsc.get_blob_client(self.container_name, self.get_resource_name(TEST_BLOB_PREFIX))
 
     async def _create_blob(self, bsc, length=512, sequence_number=None, tags=None) -> BlobClient:
         blob = self._get_blob_reference(bsc)
@@ -67,17 +66,16 @@ async def _create_blob(self, bsc, length=512, sequence_number=None, tags=None) -
         return blob
 
     async def _create_source_blob(self, bs, data, offset, length) -> BlobClient:
-        blob_client = bs.get_blob_client(self.source_container_name,
-                                         self.get_resource_name(TEST_BLOB_PREFIX))
+        blob_client = bs.get_blob_client(self.source_container_name, self.get_resource_name(TEST_BLOB_PREFIX))
         await blob_client.create_page_blob(size=length)
         await blob_client.upload_page(data, offset=offset, length=length)
         return blob_client
 
-    async def _create_sparse_page_blob(self, bsc, size=1024*1024, data='') -> BlobClient:
+    async def _create_sparse_page_blob(self, bsc, size=1024 * 1024, data="") -> BlobClient:
         blob_client = self._get_blob_reference(bsc)
         await blob_client.create_page_blob(size=size)
 
-        range_start = 8*1024 + 512
+        range_start = 8 * 1024 + 512
         # the page blob will be super sparse like this
         # :'start some data end '
@@ -88,10 +86,10 @@ async def _create_sparse_page_blob(self, bsc, size=1024*1024, data='') -> BlobCl
 
     async def _wait_for_async_copy(self, blob):
         count = 0
         props = await blob.get_blob_properties()
-        while props.copy.status == 'pending':
+        while props.copy.status == "pending":
             count = count + 1
             if count > 10:
-                self.fail('Timed out waiting for async copy to complete.')
+                self.fail("Timed out waiting for async copy to complete.")
             self.sleep(6)
             props = await blob.get_blob_properties()
         return props
@@ -119,11 +117,13 @@ async def test_upload_pages_from_url_with_oauth(self, **kwargs):
         # Arrange
         account_url = self.account_url(storage_account_name, "blob")
         if not isinstance(account_url, str):
-            account_url = account_url.encode('utf-8')
-            storage_account_key = storage_account_key.encode('utf-8')
+            account_url = account_url.encode("utf-8")
+            storage_account_key = storage_account_key.encode("utf-8")
         bsc = BlobServiceClient(account_url, credential=storage_account_key, max_page_size=4 * 1024)
         await self._setup(bsc)
-        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token("https://storage.azure.com/.default")
+        access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token(
+            "https://storage.azure.com/.default"
+        )
         token = "Bearer {}".format(access_token.token)
         source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
         source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE)
@@ -132,10 +132,12 @@ async def test_upload_pages_from_url_with_oauth(self, **kwargs):
         # Assert failure without providing token
         with pytest.raises(HttpResponseError):
             await destination_blob_client.upload_pages_from_url(
-                source_blob_client.url, offset=0, length=8 * 1024, source_offset=0)
+                source_blob_client.url, offset=0, length=8 * 1024, source_offset=0
+            )
         # Assert it works with oauth token
         await destination_blob_client.upload_pages_from_url(
-            source_blob_client.url, offset=0, length=8 * 1024, source_offset=0, source_authorization=token)
+            source_blob_client.url, offset=0, length=8 * 1024, source_offset=0, source_authorization=token
+        )
         # Assert destination blob has right content
         destination_blob = await destination_blob_client.download_blob()
         destination_blob_data = await destination_blob.readall()
@@ -147,7 +149,9 @@ async def test_create_blob(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
 
@@ -155,8 +159,8 @@ async def test_create_blob(self, **kwargs):
         resp = await blob.create_page_blob(1024)
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
         assert await blob.get_blob_properties()
 
     @BlobPreparer()
@@ -167,42 +171,50 @@ async def test_create_blob_with_immutability_policy(self, **kwargs):
         storage_resource_group_name = kwargs.pop("storage_resource_group_name")
         variables = kwargs.pop("variables", {})
 
-        bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), credential=versioned_storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(versioned_storage_account_name, "blob"),
+            credential=versioned_storage_account_key,
+            max_page_size=4 * 1024,
+        )
         await self._setup(bsc)
 
-        container_name = self.get_resource_name('vlwcontainer')
+        container_name = self.get_resource_name("vlwcontainer")
         if self.is_live:
             token_credential = self.get_credential(BlobServiceClient, is_async=True)
             subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
-            mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+            mgmt_client = StorageManagementClient(token_credential, subscription_id, "2021-04-01")
             property = mgmt_client.models().BlobContainer(
-                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
-            await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property)
+                immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True)
+            )
+            await mgmt_client.blob_containers.create(
+                storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property
+            )
 
         blob_name = self.get_resource_name("vlwblob")
         blob = bsc.get_blob_client(container_name, blob_name)
 
         # Act
-        expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
-        immutability_policy = ImmutabilityPolicy(expiry_time=expiry_time,
-                                                 policy_mode=BlobImmutabilityPolicyMode.Unlocked)
-        resp = await blob.create_page_blob(1024,
-                                           immutability_policy=immutability_policy,
-                                           legal_hold=True)
+        expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(seconds=5))
+        immutability_policy = ImmutabilityPolicy(
+            expiry_time=expiry_time, policy_mode=BlobImmutabilityPolicyMode.Unlocked
+        )
+        resp = await blob.create_page_blob(1024, immutability_policy=immutability_policy, legal_hold=True)
         props = await blob.get_blob_properties()
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert props['has_legal_hold']
-        assert props['immutability_policy']['expiry_time'] is not None
-        assert props['immutability_policy']['policy_mode'] is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert props["has_legal_hold"]
+        assert props["immutability_policy"]["expiry_time"] is not None
+        assert props["immutability_policy"]["policy_mode"] is not None
 
         if self.is_live:
             await blob.delete_immutability_policy()
             await blob.set_legal_hold(False)
             await blob.delete_blob()
-            await mgmt_client.blob_containers.delete(storage_resource_group_name, versioned_storage_account_name, container_name)
+            await mgmt_client.blob_containers.delete(
+                storage_resource_group_name, versioned_storage_account_name, container_name
+            )
 
         return variables
 
@@ -212,7 +224,11 @@ async def test_create_page_blob_returns_vid(self, **kwargs):
         versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
         versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(versioned_storage_account_name, "blob"), credential=versioned_storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(versioned_storage_account_name, "blob"),
+            credential=versioned_storage_account_key,
+            max_page_size=4 * 1024,
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
 
@@ -220,9 +236,9 @@ async def test_create_page_blob_returns_vid(self, **kwargs):
         resp = await blob.create_page_blob(1024)
 
         # Assert
-        assert resp['version_id'] is not None
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        assert resp["version_id"] is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
         assert await blob.get_blob_properties()
 
     @BlobPreparer()
@@ -231,11 +247,13 @@ async def test_create_blob_with_metadata(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         # Arrange
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
-        metadata = {'hello': 'world', 'number': '42'}
+        metadata = {"hello": "world", "number": "42"}
 
         # Act
         resp = await blob.create_page_blob(512, metadata=metadata)
@@ -250,10 +268,12 @@ async def test_put_page_with_lease_id(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = await self._create_blob(bsc)
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444')
+        lease = await blob.acquire_lease(lease_id="00000000-1111-2222-3333-444444444444")
 
         # Act
         data = self.get_random_bytes(512)
@@ -270,19 +290,34 @@ async def test_put_page_with_lease_id_and_if_tags(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         tags = {"tag1 name": "my tag", "tag2": "secondtag", "tag3": "thirdtag"}
         blob = await self._create_blob(bsc, tags=tags)
         with pytest.raises(ResourceModifiedError):
-            await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', if_tags_match_condition="\"tag1\"='first tag'")
-        lease = await blob.acquire_lease(lease_id='00000000-1111-2222-3333-444444444444', if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'")
+            await blob.acquire_lease(
+                lease_id="00000000-1111-2222-3333-444444444444", if_tags_match_condition="\"tag1\"='first tag'"
+            )
+        lease = await blob.acquire_lease(
+            lease_id="00000000-1111-2222-3333-444444444444",
+            if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'",
+        )
 
         # Act
         data = self.get_random_bytes(512)
         with pytest.raises(ResourceModifiedError):
-            await blob.upload_page(data, offset=0, length=512, lease=lease, if_tags_match_condition="\"tag1\"='first tag'")
-        await blob.upload_page(data, offset=0, length=512, lease=lease, if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'")
+            await blob.upload_page(
+                data, offset=0, length=512, lease=lease, if_tags_match_condition="\"tag1\"='first tag'"
+            )
+        await blob.upload_page(
+            data,
+            offset=0,
+            length=512,
+            lease=lease,
+            if_tags_match_condition="\"tag1 name\"='my tag' AND \"tag2\"='secondtag'",
+        )
 
         page_ranges, cleared = await blob.get_page_ranges()
 
@@ -297,7 +332,9 @@ async def test_update_page(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = await self._create_blob(bsc)
 
@@ -306,9 +343,9 @@ async def test_update_page(self, **kwargs):
         resp = await blob.upload_page(data, offset=0, length=512)
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert resp.get('blob_sequence_number') is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert resp.get("blob_sequence_number") is not None
         await self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc)
 
     @BlobPreparer()
@@ -317,7 +354,9 @@ async def test_create_8tb_blob(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
 
@@ -327,8 +366,8 @@ async def test_create_8tb_blob(self, **kwargs):
         page_ranges, cleared = await blob.get_page_ranges()
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
         assert isinstance(props, BlobProperties)
         assert props.size == EIGHT_TB
         assert 0 == len(page_ranges)
@@ -339,7 +378,9 @@ async def test_create_larger_than_8tb_blob_fail(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
 
@@ -353,7 +394,9 @@ async def test_update_8tb_blob_page(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         await blob.create_page_blob(EIGHT_TB)
@@ -367,14 +410,14 @@ async def test_update_8tb_blob_page(self, **kwargs):
         page_ranges, cleared = await blob.get_page_ranges()
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert resp.get('blob_sequence_number') is not None
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert resp.get("blob_sequence_number") is not None
         await self.assertRangeEqual(self.container_name, blob.blob_name, data, start_offset, length, bsc)
         assert props.size == EIGHT_TB
         assert 1 == len(page_ranges)
-        assert page_ranges[0]['start'] == start_offset
-        assert page_ranges[0]['end'] == start_offset + length - 1
+        assert page_ranges[0]["start"] == start_offset
+        assert page_ranges[0]["end"] == start_offset + length - 1
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -382,7 +425,9 @@ async def test_update_page_with_md5(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = await self._create_blob(bsc)
 
@@ -397,17 +442,19 @@ async def test_clear_page(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = await self._create_blob(bsc)
 
         # Act
         resp = await blob.clear_page(offset=0, length=512)
 
         # Assert
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
-        assert resp.get('blob_sequence_number') is not None
-        await self.assertBlobEqual(self.container_name, blob.blob_name, b'\x00' * 512, bsc)
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
+        assert resp.get("blob_sequence_number") is not None
+        await self.assertBlobEqual(self.container_name, blob.blob_name, b"\x00" * 512, bsc)
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -415,7 +462,9 @@ async def test_put_page_if_sequence_number_lt_success(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -435,7 +484,9 @@ async def test_update_page_if_sequence_number_lt_failure(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -454,7 +505,9 @@ async def test_update_page_if_sequence_number_lte_success(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -473,7 +526,9 @@ async def test_update_page_if_sequence_number_lte_failure(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -492,7 +547,9 @@ async def test_update_page_if_sequence_number_eq_success(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -511,7 +568,9 @@ async def test_update_page_if_sequence_number_eq_failure(self, **kwargs):
         storage_account_name = kwargs.pop("storage_account_name")
         storage_account_key = kwargs.pop("storage_account_key")
 
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         blob = self._get_blob_reference(bsc)
         data = self.get_random_bytes(512)
@@ -531,7 +590,9 @@ async def test_upload_pages_from_url(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
 
         # Arrange
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
         source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE)
@@ -543,26 +604,29 @@ async def test_upload_pages_from_url(self, **kwargs):
             snapshot=source_blob_client.snapshot,
             account_key=source_blob_client.credential.account_key,
             permission=BlobSasPermissions(read=True, delete=True),
-            expiry=datetime.utcnow() + timedelta(hours=1))
+            expiry=datetime.utcnow() + timedelta(hours=1),
+        )
 
         destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE)
 
         # Act: make update page from url calls
         resp = await destination_blob_client.upload_pages_from_url(
-            source_blob_client.url + "?" + sas, offset=0, length=4 * 1024, source_offset=0)
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+            source_blob_client.url + "?" + sas, offset=0, length=4 * 1024, source_offset=0
+        )
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
 
-        resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, offset=4 * 1024,
-                                                                   length=4 * 1024, source_offset=4 * 1024)
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        resp = await destination_blob_client.upload_pages_from_url(
+            source_blob_client.url + "?" + sas, offset=4 * 1024, length=4 * 1024, source_offset=4 * 1024
+        )
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
 
         # Assert the destination blob is constructed correctly
         blob_properties = await destination_blob_client.get_blob_properties()
         await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc)
-        assert blob_properties.get('etag') == resp.get('etag')
-        assert blob_properties.get('last_modified') == resp.get('last_modified')
+        assert blob_properties.get("etag") == resp.get("etag")
+        assert blob_properties.get("last_modified") == resp.get("last_modified")
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -571,7 +635,9 @@ async def test_upload_pages_from_url_and_validate_content_md5(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
 
         # Arrange
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
         source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE)
@@ -584,32 +650,33 @@ async def test_upload_pages_from_url_and_validate_content_md5(self, **kwargs):
             snapshot=source_blob_client.snapshot,
             account_key=source_blob_client.credential.account_key,
             permission=BlobSasPermissions(read=True, delete=True),
-            expiry=datetime.utcnow() + timedelta(hours=1))
+            expiry=datetime.utcnow() + timedelta(hours=1),
+        )
 
         destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE)
 
         # Act: make update page from url calls
-        resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas,
-                                                                   0,
-                                                                   SOURCE_BLOB_SIZE,
-                                                                   0,
-                                                                   source_content_md5=src_md5)
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        resp = await destination_blob_client.upload_pages_from_url(
+            source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, source_content_md5=src_md5
+        )
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
 
         # Assert the destination blob is constructed correctly
         blob_properties = await destination_blob_client.get_blob_properties()
         await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc)
-        assert blob_properties.get('etag') == resp.get('etag')
-        assert blob_properties.get('last_modified') == resp.get('last_modified')
+        assert blob_properties.get("etag") == resp.get("etag")
+        assert blob_properties.get("last_modified") == resp.get("last_modified")
 
         # Act part 2: put block from url with wrong md5
         with pytest.raises(HttpResponseError):
-            await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0,
-                                                                SOURCE_BLOB_SIZE,
-                                                                0,
-                                                                source_content_md5=StorageContentValidation.get_content_md5(
-                                                                    b"POTATO"))
+            await destination_blob_client.upload_pages_from_url(
+                source_blob_client.url + "?" + sas,
+                0,
+                SOURCE_BLOB_SIZE,
+                0,
+                source_content_md5=StorageContentValidation.get_content_md5(b"POTATO"),
+            )
 
     @BlobPreparer()
     @recorded_by_proxy_async
@@ -618,7 +685,9 @@ async def test_upload_pages_from_url_with_source_if_modified(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
 
         # Arrange
-        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024)
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024
+        )
         await self._setup(bsc)
         source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
         source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE)
@@ -631,34 +700,37 @@ async def test_upload_pages_from_url_with_source_if_modified(self, **kwargs):
             snapshot=source_blob_client.snapshot,
             account_key=source_blob_client.credential.account_key,
             permission=BlobSasPermissions(read=True, delete=True),
-            expiry=datetime.utcnow() + timedelta(hours=1))
+            expiry=datetime.utcnow() + timedelta(hours=1),
+        )
 
         destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE)
 
         # Act: make update page from url calls
-        resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas,
-                                                                   0,
-                                                                   SOURCE_BLOB_SIZE,
-                                                                   0,
-                                                                   source_if_modified_since=source_properties.get(
-                                                                       'last_modified') - timedelta(
-                                                                       hours=15))
-        assert resp.get('etag') is not None
-        assert resp.get('last_modified') is not None
+        resp = await destination_blob_client.upload_pages_from_url(
+            source_blob_client.url + "?" + sas,
+            0,
+            SOURCE_BLOB_SIZE,
+            0,
+            source_if_modified_since=source_properties.get("last_modified") - timedelta(hours=15),
+        )
+        assert resp.get("etag") is not None
+        assert resp.get("last_modified") is not None
 
         # Assert the destination blob is constructed correctly
         blob_properties = await destination_blob_client.get_blob_properties()
         await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc)
-        assert blob_properties.get('etag') == resp.get('etag')
-        assert blob_properties.get('last_modified') == resp.get('last_modified')
+        assert blob_properties.get("etag") == resp.get("etag")
+        assert blob_properties.get("last_modified") == resp.get("last_modified")
 
         # Act part 2: put block from url with wrong md5
         with pytest.raises(HttpResponseError):
-            await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0,
-                                                                SOURCE_BLOB_SIZE,
-                                                                0,
-                                                                source_if_modified_since=source_properties.get(
-                                                                    'last_modified'))
+            await destination_blob_client.upload_pages_from_url(
+                source_blob_client.url + "?"
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_if_modified_since=source_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy_async @@ -667,7 +739,9 @@ async def test_upload_pages_from_url_with_source_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -680,34 +754,37 @@ async def test_upload_pages_from_url_with_source_if_unmodified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - source_if_unmodified_since=source_properties.get( - 'last_modified')) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_if_unmodified_since=source_properties.get("last_modified"), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - source_if_unmodified_since=source_properties.get( - 'last_modified') - timedelta( - hours=15)) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_if_unmodified_since=source_properties.get("last_modified") - timedelta(hours=15), + ) @BlobPreparer() @recorded_by_proxy_async @@ -716,7 +793,9 @@ async def test_upload_pages_from_url_with_source_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -729,30 +808,39 @@ async def test_upload_pages_from_url_with_source_if_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls resp = await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - source_etag=source_properties.get('etag'), - source_match_condition=MatchConditions.IfNotModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - source_etag='0x111111111111111', - source_match_condition=MatchConditions.IfNotModified) + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -761,7 +849,9 @@ async def test_upload_pages_from_url_with_source_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -774,28 +864,39 @@ async def test_upload_pages_from_url_with_source_if_none_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls resp = await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - source_etag='0x111111111111111', source_match_condition=MatchConditions.IfModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_etag="0x111111111111111", + source_match_condition=MatchConditions.IfModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - source_etag=source_properties.get('etag'), source_match_condition=MatchConditions.IfModified) + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + source_etag=source_properties.get("etag"), + source_match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -804,7 +905,9 @@ async def test_upload_pages_from_url_with_if_modified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -817,34 +920,37 @@ async def test_upload_pages_from_url_with_if_modified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_modified_since=source_properties.get( - 'last_modified') - timedelta( - minutes=15)) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + if_modified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_modified_since=blob_properties.get( - 'last_modified')) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + if_modified_since=blob_properties.get("last_modified"), + ) @BlobPreparer() @recorded_by_proxy_async @@ -853,7 +959,9 @@ async def test_upload_pages_from_url_with_if_unmodified(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -866,34 +974,37 @@ async def test_upload_pages_from_url_with_if_unmodified(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_unmodified_since=source_properties.get( - 'last_modified') + timedelta(minutes=15)) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + if_unmodified_since=source_properties.get("last_modified") + timedelta(minutes=15), + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_unmodified_since=source_properties.get( - 'last_modified') - timedelta( - minutes=15)) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + if_unmodified_since=source_properties.get("last_modified") - timedelta(minutes=15), + ) @BlobPreparer() @recorded_by_proxy_async @@ -902,7 +1013,9 @@ async def test_upload_pages_from_url_with_if_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -914,31 +1027,40 @@ async def test_upload_pages_from_url_with_if_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) destination_blob_properties = await destination_blob_client.get_blob_properties() # Act: make update page from url calls resp = await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - etag=destination_blob_properties.get('etag'), - match_condition=MatchConditions.IfNotModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag=destination_blob_properties.get("etag"), + match_condition=MatchConditions.IfNotModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): await destination_blob_client.upload_pages_from_url( - source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, - etag='0x111111111111111', - match_condition=MatchConditions.IfNotModified) + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag="0x111111111111111", + match_condition=MatchConditions.IfNotModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -947,7 +1069,9 @@ async def test_upload_pages_from_url_with_if_none_match(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) source_blob_client = await self._create_source_blob(bsc, source_blob_data, 0, SOURCE_BLOB_SIZE) @@ -959,33 +1083,39 @@ async def test_upload_pages_from_url_with_if_none_match(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - etag='0x111111111111111', - match_condition=MatchConditions.IfModified) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag="0x111111111111111", + match_condition=MatchConditions.IfModified, + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - etag=blob_properties.get('etag'), - match_condition=MatchConditions.IfModified) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, + 0, + SOURCE_BLOB_SIZE, + 0, + etag=blob_properties.get("etag"), + match_condition=MatchConditions.IfModified, + ) @BlobPreparer() @recorded_by_proxy_async @@ -994,7 +1124,9 @@ async def test_upload_pages_from_url_with_sequence_number_lt(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) start_sequence = 10 source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) @@ -1007,31 +1139,29 @@ async def test_upload_pages_from_url_with_sequence_number_lt(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE, sequence_number=start_sequence) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lt=start_sequence + 1) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lt=start_sequence + 1 + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lt=start_sequence) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lt=start_sequence + ) @BlobPreparer() @recorded_by_proxy_async @@ -1040,7 +1170,9 @@ async def test_upload_pages_from_url_with_sequence_number_lte(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) start_sequence = 10 source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) @@ -1053,31 +1185,29 @@ async def test_upload_pages_from_url_with_sequence_number_lte(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE, sequence_number=start_sequence) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lte=start_sequence) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lte=start_sequence + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_lte=start_sequence - 1) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_lte=start_sequence - 1 + ) @BlobPreparer() @recorded_by_proxy_async @@ -1086,7 +1216,9 @@ async def test_upload_pages_from_url_with_sequence_number_eq(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) start_sequence = 10 source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) @@ -1099,31 +1231,29 @@ async def test_upload_pages_from_url_with_sequence_number_eq(self, **kwargs): snapshot=source_blob_client.snapshot, account_key=source_blob_client.credential.account_key, permission=BlobSasPermissions(read=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1)) + expiry=datetime.utcnow() + timedelta(hours=1), + ) destination_blob_client = await self._create_blob(bsc, SOURCE_BLOB_SIZE, sequence_number=start_sequence) # Act: make update page from url calls - resp = await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, - 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_eq=start_sequence) - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + resp = await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" + sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_eq=start_sequence + ) + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None # Assert the destination blob is constructed correctly blob_properties = await destination_blob_client.get_blob_properties() await self.assertBlobEqual(self.container_name, destination_blob_client.blob_name, source_blob_data, bsc) - assert blob_properties.get('etag') == resp.get('etag') - assert blob_properties.get('last_modified') == resp.get('last_modified') + assert blob_properties.get("etag") == resp.get("etag") + assert blob_properties.get("last_modified") == resp.get("last_modified") # Act part 2: put block from url with wrong md5 with pytest.raises(HttpResponseError): - await destination_blob_client.upload_pages_from_url(source_blob_client.url + "?" + sas, 0, - SOURCE_BLOB_SIZE, - 0, - if_sequence_number_eq=start_sequence + 1) + await destination_blob_client.upload_pages_from_url( + source_blob_client.url + "?" 
+ sas, 0, SOURCE_BLOB_SIZE, 0, if_sequence_number_eq=start_sequence + 1 + ) @BlobPreparer() @recorded_by_proxy_async @@ -1131,17 +1261,19 @@ async def test_update_page_unicode(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act - data = u'abcdefghijklmnop' * 32 + data = "abcdefghijklmnop" * 32 resp = await blob.upload_page(data, offset=0, length=512) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None @BlobPreparer() @recorded_by_proxy_async @@ -1154,7 +1286,7 @@ async def test_list_page_ranges(self, **kwargs): blob: BlobClient = await self._create_blob(bsc, length=2560) data = self.get_random_bytes(512) await blob.upload_page(data, offset=0, length=512) - await blob.upload_page(data*2, offset=1024, length=1024) + await blob.upload_page(data * 2, offset=1024, length=1024) # Act ranges = [] @@ -1271,7 +1403,7 @@ async def test_list_page_ranges_diff(self, **kwargs): async for r in blob.list_page_ranges(previous_snapshot=snapshot1): ranges1.append(r) ranges2 = [] - async for r in blob.list_page_ranges(previous_snapshot=snapshot2['snapshot']): + async for r in blob.list_page_ranges(previous_snapshot=snapshot2["snapshot"]): ranges2.append(r) # Assert @@ -1330,7 +1462,9 @@ async def test_get_page_ranges_no_pages(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) @@ -1348,7 +1482,9 @@ async def test_get_page_ranges_2_pages(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc, 2048) data = self.get_random_bytes(512) @@ -1362,10 +1498,10 @@ async def test_get_page_ranges_2_pages(self, **kwargs): assert ranges is not None assert isinstance(ranges, list) assert len(ranges) == 2 - assert ranges[0]['start'] == 0 - assert ranges[0]['end'] == 511 - assert ranges[1]['start'] == 1024 - assert ranges[1]['end'] == 1535 + assert ranges[0]["start"] == 0 + assert ranges[0]["end"] == 511 + assert ranges[1]["start"] == 1024 + assert ranges[1]["end"] == 1535 @BlobPreparer() @recorded_by_proxy_async @@ -1373,7 +1509,9 @@ async def test_get_page_ranges_diff(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, 
max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc, 2048) data = self.get_random_bytes(1536) @@ -1384,7 +1522,7 @@ async def test_get_page_ranges_diff(self, **kwargs): # Act ranges1, cleared1 = await blob.get_page_ranges(previous_snapshot_diff=snapshot1) - ranges2, cleared2 = await blob.get_page_ranges(previous_snapshot_diff=snapshot2['snapshot']) + ranges2, cleared2 = await blob.get_page_ranges(previous_snapshot_diff=snapshot2["snapshot"]) # Assert assert ranges1 is not None @@ -1392,20 +1530,20 @@ async def test_get_page_ranges_diff(self, **kwargs): assert len(ranges1) == 2 assert isinstance(cleared1, list) assert len(cleared1) == 1 - assert ranges1[0]['start'] == 0 - assert ranges1[0]['end'] == 511 - assert cleared1[0]['start'] == 512 - assert cleared1[0]['end'] == 1023 - assert ranges1[1]['start'] == 1024 - assert ranges1[1]['end'] == 1535 + assert ranges1[0]["start"] == 0 + assert ranges1[0]["end"] == 511 + assert cleared1[0]["start"] == 512 + assert cleared1[0]["end"] == 1023 + assert ranges1[1]["start"] == 1024 + assert ranges1[1]["end"] == 1535 assert ranges2 is not None assert isinstance(ranges2, list) assert len(ranges2) == 0 assert isinstance(cleared2, list) assert len(cleared2) == 1 - assert cleared2[0]['start'] == 512 - assert cleared2[0]['end'] == 1023 + assert cleared2[0]["start"] == 512 + assert cleared2[0]["end"] == 1023 @pytest.mark.playback_test_only @BlobPreparer() @@ -1425,7 +1563,7 @@ async def test_get_page_range_diff_for_managed_disk(self, **kwargs): data = self.get_random_bytes(1536) snapshot1 = await blob.create_snapshot() - snapshot_blob1 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot1['snapshot']) + snapshot_blob1 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot1["snapshot"]) sas_token1 = self.generate_sas( generate_blob_sas, snapshot_blob1.account_name, @@ -1439,7 +1577,7 @@ async def test_get_page_range_diff_for_managed_disk(self, **kwargs): await blob.upload_page(data, offset=0, length=1536) snapshot2 = await blob.create_snapshot() - snapshot_blob2 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot2['snapshot']) + snapshot_blob2 = BlobClient.from_blob_url(blob.url, credential=credential, snapshot=snapshot2["snapshot"]) sas_token2 = self.generate_sas( generate_blob_sas, snapshot_blob2.account_name, @@ -1453,8 +1591,8 @@ async def test_get_page_range_diff_for_managed_disk(self, **kwargs): await blob.clear_page(offset=512, length=512) # Act - ranges1, cleared1 = await blob.get_page_range_diff_for_managed_disk(snapshot_blob1.url + '&' + sas_token1) - ranges2, cleared2 = await blob.get_page_range_diff_for_managed_disk(snapshot_blob2.url + '&' + sas_token2) + ranges1, cleared1 = await blob.get_page_range_diff_for_managed_disk(snapshot_blob1.url + "&" + sas_token1) + ranges2, cleared2 = await blob.get_page_range_diff_for_managed_disk(snapshot_blob2.url + "&" + sas_token2) # Assert assert ranges1 is not None @@ -1462,20 +1600,20 @@ async def test_get_page_range_diff_for_managed_disk(self, **kwargs): assert len(ranges1) == 2 assert isinstance(cleared1, list) assert len(cleared1) == 1 - assert ranges1[0]['start'] == 0 - assert ranges1[0]['end'] == 511 - assert cleared1[0]['start'] == 512 - assert cleared1[0]['end'] == 1023 - assert ranges1[1]['start'] == 1024 - assert ranges1[1]['end'] == 1535 + assert 
ranges1[0]["start"] == 0 + assert ranges1[0]["end"] == 511 + assert cleared1[0]["start"] == 512 + assert cleared1[0]["end"] == 1023 + assert ranges1[1]["start"] == 1024 + assert ranges1[1]["end"] == 1535 assert ranges2 is not None assert isinstance(ranges2, list) assert len(ranges2) == 0 assert isinstance(cleared2, list) assert len(cleared2) == 1 - assert cleared2[0]['start'] == 512 - assert cleared2[0]['end'] == 1023 + assert cleared2[0]["start"] == 512 + assert cleared2[0]["end"] == 1023 @BlobPreparer() @recorded_by_proxy_async @@ -1483,7 +1621,9 @@ async def test_update_page_fail(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc, 2048) data = self.get_random_bytes(512) @@ -1492,11 +1632,11 @@ async def test_update_page_fail(self, **kwargs): try: await blob.upload_page(data, offset=1024, length=513) except ValueError as e: - assert str(e) == 'length must be an integer that aligns with 512 page size' + assert str(e) == "length must be an integer that aligns with 512 page size" return # Assert - raise Exception('Page range validation failed to throw on failure case') + raise Exception("Page range validation failed to throw on failure case") @BlobPreparer() @recorded_by_proxy_async @@ -1504,7 +1644,9 @@ async def test_resize_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc, 1024) @@ -1512,9 +1654,9 @@ async def test_resize_blob(self, **kwargs): resp = await blob.resize_blob(512) # Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert resp.get('blob_sequence_number') is not None + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert resp.get("blob_sequence_number") is not None props = await blob.get_blob_properties() assert isinstance(props, BlobProperties) assert props.size == 512 @@ -1525,17 +1667,19 @@ async def test_set_sequence_number_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = await self._create_blob(bsc) # Act resp = await blob.set_sequence_number(SequenceNumberAction.Update, 6) - #Assert - assert resp.get('etag') is not None - assert resp.get('last_modified') is not None - assert resp.get('blob_sequence_number') is not None + # Assert + assert resp.get("etag") is not None + assert resp.get("last_modified") is not None + assert resp.get("blob_sequence_number") is not None props = await blob.get_blob_properties() assert 
isinstance(props, BlobProperties) assert props.page_blob_sequence_number == 6 @@ -1546,7 +1690,9 @@ async def test_create_page_blob_with_no_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1554,25 +1700,19 @@ async def test_create_page_blob_with_no_overwrite(self, **kwargs): # Act create_resp = await blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.PageBlob, - metadata={'blobdata': 'data1'}) + data1, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data1"} + ) with pytest.raises(ResourceExistsError): - await blob.upload_blob( - data2, - overwrite=False, - blob_type=BlobType.PageBlob, - metadata={'blobdata': 'data2'}) + await blob.upload_blob(data2, overwrite=False, blob_type=BlobType.PageBlob, metadata={"blobdata": "data2"}) props = await blob.get_blob_properties() # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data1, bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') - assert props.metadata == {'blobdata': 'data1'} + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") + assert props.metadata == {"blobdata": "data1"} assert props.size == LARGE_BLOB_SIZE assert props.blob_type == BlobType.PageBlob @@ -1582,7 +1722,9 @@ async def test_create_page_blob_with_overwrite(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1590,23 +1732,19 @@ async def test_create_page_blob_with_overwrite(self, **kwargs): # Act create_resp = await blob.upload_blob( - data1, - overwrite=True, - blob_type=BlobType.PageBlob, - metadata={'blobdata': 'data1'}) + data1, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data1"} + ) update_resp = await blob.upload_blob( - data2, - overwrite=True, - blob_type=BlobType.PageBlob, - metadata={'blobdata': 'data2'}) + data2, overwrite=True, blob_type=BlobType.PageBlob, metadata={"blobdata": "data2"} + ) props = await blob.get_blob_properties() # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data2, bsc) - assert props.etag == update_resp.get('etag') - assert props.last_modified == update_resp.get('last_modified') - assert props.metadata == {'blobdata': 'data2'} + assert props.etag == update_resp.get("etag") + assert props.last_modified == update_resp.get("last_modified") + assert props.metadata == {"blobdata": "data2"} assert props.size == LARGE_BLOB_SIZE + 512 assert props.blob_type == BlobType.PageBlob @@ -1616,7 +1754,9 @@ async def test_create_blob_from_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") 
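# --- reviewer sketch (not part of the diff) --------------------------------
# The overwrite tests just above pin down upload_blob()'s contract for page
# blobs: overwrite=False (the default) raises ResourceExistsError when the
# blob already exists, while overwrite=True replaces the data and metadata in
# one call. A minimal, self-contained sketch of that contract; the connection
# string, container and blob names below are illustrative assumptions, not
# values from this PR:
from azure.core.exceptions import ResourceExistsError
from azure.storage.blob.aio import BlobServiceClient


async def overwrite_contract(conn_str: str) -> None:
    async with BlobServiceClient.from_connection_string(conn_str) as bsc:
        blob = bsc.get_blob_client("demo-container", "demo-page-blob")
        data = b"\x00" * 1024  # page blob payloads must be 512-byte aligned
        # First upload creates the page blob; overwrite=True also clears any
        # previous incarnation and its metadata.
        await blob.upload_blob(
            data, blob_type="PageBlob", overwrite=True, metadata={"blobdata": "data1"}
        )
        try:
            # Second upload without overwrite must fail: the blob exists.
            await blob.upload_blob(data, blob_type="PageBlob", overwrite=False)
        except ResourceExistsError:
            pass  # expected, mirrors pytest.raises(ResourceExistsError) above
# usage (illustrative): asyncio.run(overwrite_contract(conn_str))
# ----------------------------------------------------------------------------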
storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1627,8 +1767,8 @@ async def test_create_blob_from_bytes(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1636,7 +1776,9 @@ async def test_create_blob_from_0_bytes(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(0) @@ -1647,8 +1789,8 @@ async def test_create_blob_from_0_bytes(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1656,27 +1798,29 @@ async def test_create_blob_from_bytes_with_progress_first(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) - create_resp = await blob.upload_blob( - data, blob_type=BlobType.PageBlob, raw_response_hook=callback) + create_resp = await blob.upload_blob(data, blob_type=BlobType.PageBlob, raw_response_hook=callback) props = await blob.get_blob_properties() # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") self.assert_upload_progress(LARGE_BLOB_SIZE, self.config.max_page_size, progress) @BlobPreparer() @@ -1685,7 +1829,9 @@ async def test_create_blob_from_bytes_with_index(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = 
kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1703,7 +1849,9 @@ async def test_create_blob_from_bytes_with_index_and_count(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1715,9 +1863,9 @@ async def test_create_blob_from_bytes_with_index_and_count(self, **kwargs): props = await blob.get_blob_properties() # Assert - await self.assertBlobEqual(self.container_name, blob.blob_name, data[index:index + count], bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + await self.assertBlobEqual(self.container_name, blob.blob_name, data[index : index + count], bsc) + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1725,7 +1873,9 @@ async def test_create_blob_from_path(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1739,8 +1889,8 @@ async def test_create_blob_from_path(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data, bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1748,16 +1898,19 @@ async def test_create_blob_from_path_with_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1776,7 +1929,9 @@ async def test_create_blob_from_stream(self, **kwargs): 
storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1791,8 +1946,8 @@ async def test_create_blob_from_stream(self, **kwargs): # Assert await self.assertBlobEqual(self.container_name, blob.blob_name, data[:blob_size], bsc) - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1800,13 +1955,15 @@ async def test_create_blob_from_stream_with_empty_pages(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) # data is almost all empty (0s) except two ranges await self._setup(bsc) blob = self._get_blob_reference(bsc) data = bytearray(16 * 1024) - data[512: 1024] = self.get_random_bytes(512) - data[8192: 8196] = self.get_random_bytes(4) + data[512:1024] = self.get_random_bytes(512) + data[8192:8196] = self.get_random_bytes(4) # Act blob_size = len(data) @@ -1822,12 +1979,12 @@ async def test_create_blob_from_stream_with_empty_pages(self, **kwargs): ranges = await blob.get_page_ranges() page_ranges, cleared = list(ranges) assert len(page_ranges) == 2 - assert page_ranges[0]['start'] == 0 - assert page_ranges[0]['end'] == 4095 - assert page_ranges[1]['start'] == 8192 - assert page_ranges[1]['end'] == 12287 - assert props.etag == create_resp.get('etag') - assert props.last_modified == create_resp.get('last_modified') + assert page_ranges[0]["start"] == 0 + assert page_ranges[0]["end"] == 4095 + assert page_ranges[1]["start"] == 8192 + assert page_ranges[1]["end"] == 12287 + assert props.etag == create_resp.get("etag") + assert props.last_modified == create_resp.get("last_modified") @BlobPreparer() @recorded_by_proxy_async @@ -1835,7 +1992,9 @@ async def test_create_blob_from_stream_non_seekable(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1857,16 +2016,19 @@ async def test_create_blob_from_stream_with_progress(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), 
credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1886,7 +2048,9 @@ async def test_create_blob_from_stream_truncated(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1907,16 +2071,19 @@ async def test_create_blob_from_stream_with_progress_truncated(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act progress = [] + def callback(response): - current = response.context['upload_stream_current'] - total = response.context['data_stream_total'] + current = response.context["upload_stream_current"] + total = response.context["data_stream_total"] if current is not None: progress.append((current, total)) @@ -1936,7 +2103,9 @@ async def test_create_blob_with_md5_small(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(512) @@ -1952,10 +2121,14 @@ async def test_create_blob_with_md5_large(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) blob = self._get_blob_reference(bsc) data = self.get_random_bytes(LARGE_BLOB_SIZE) @@ -1971,7 +2144,9 @@ async def test_incremental_copy_blob(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) source_blob = await self._create_blob(bsc, 2048) data = self.get_random_bytes(512) @@ -1980,7 +2155,8 @@ async def test_incremental_copy_blob(self, **kwargs): source_snapshot_blob = await source_blob.create_snapshot() snapshot_blob = BlobClient.from_blob_url( - source_blob.url, credential=source_blob.credential, snapshot=source_snapshot_blob) + source_blob.url, credential=source_blob.credential, snapshot=source_snapshot_blob + ) sas_token = self.generate_sas( generate_blob_sas, snapshot_blob.account_name, @@ -1993,18 +2169,17 @@ async def test_incremental_copy_blob(self, **kwargs): ) sas_blob = BlobClient.from_blob_url(snapshot_blob.url, credential=sas_token) - # Act - dest_blob = bsc.get_blob_client(self.container_name, 'dest_blob') + dest_blob = bsc.get_blob_client(self.container_name, "dest_blob") copy = await dest_blob.start_copy_from_url(sas_blob.url, incremental_copy=True) # Assert assert copy is not None - assert copy['copy_id'] is not None - assert copy['copy_status'] == 'pending' + assert copy["copy_id"] is not None + assert copy["copy_status"] == "pending" copy_blob = await self._wait_for_async_copy(dest_blob) - assert copy_blob.copy.status == 'success' + assert copy_blob.copy.status == "success" assert copy_blob.copy.destination_snapshot is not None # strip off protocol @@ -2016,14 +2191,18 @@ async def test_blob_tier_on_create(self, **kwargs): premium_storage_account_name = kwargs.pop("premium_storage_account_name") premium_storage_account_key = kwargs.pop("premium_storage_account_key") - bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(premium_storage_account_name, "blob"), + credential=premium_storage_account_key, + max_page_size=4 * 1024, + ) await self._setup(bsc) url = self.account_url(premium_storage_account_name, "blob") credential = premium_storage_account_key pbs = BlobServiceClient(url, credential=credential) try: - container_name = self.get_resource_name('utpremiumcontainer') + container_name = self.get_resource_name("utpremiumcontainer") container = pbs.get_container_client(container_name) if self.is_live: @@ -2043,10 +2222,8 @@ async def test_blob_tier_on_create(self, **kwargs): pblob2 = pbs.get_blob_client(container_name, blob2.blob_name) byte_data = self.get_random_bytes(1024) await pblob2.upload_blob( - byte_data, - premium_page_blob_tier=PremiumPageBlobTier.P6, - blob_type=BlobType.PageBlob, - overwrite=True) + byte_data, premium_page_blob_tier=PremiumPageBlobTier.P6, blob_type=BlobType.PageBlob, overwrite=True + ) props2 = await pblob2.get_blob_properties() assert props2.blob_tier == PremiumPageBlobTier.P6 @@ -2058,7 +2235,12 @@ async def test_blob_tier_on_create(self, **kwargs): with tempfile.TemporaryFile() as temp_file: temp_file.write(byte_data) temp_file.seek(0) - await pblob3.upload_blob(temp_file, blob_type=BlobType.PageBlob, premium_page_blob_tier=PremiumPageBlobTier.P10, overwrite=True) + await pblob3.upload_blob( + temp_file, + blob_type=BlobType.PageBlob, + premium_page_blob_tier=PremiumPageBlobTier.P10, + overwrite=True, + ) props3 = await pblob3.get_blob_properties() assert props3.blob_tier == PremiumPageBlobTier.P10 @@ -2073,14 
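# ------------------------------------------------------------------------------
# Incremental page-blob copy as exercised above: the source must be a blob
# snapshot readable by the service (here via SAS), and the copy completes
# asynchronously. A hedged sketch; clients and names are placeholders.
import os
from datetime import datetime, timedelta
from azure.storage.blob import BlobClient, BlobSasPermissions, BlobServiceClient, generate_blob_sas

account_key = os.environ["ACCOUNT_KEY"]
bsc = BlobServiceClient(os.environ["ACCOUNT_URL"], credential=account_key)
source_blob = bsc.get_blob_client("my-container", "source-page-blob")

snap = source_blob.create_snapshot()
snapshot_blob = BlobClient.from_blob_url(source_blob.url, credential=account_key, snapshot=snap)
sas_token = generate_blob_sas(
    snapshot_blob.account_name,
    snapshot_blob.container_name,
    snapshot_blob.blob_name,
    snapshot=snap["snapshot"],
    account_key=account_key,
    permission=BlobSasPermissions(read=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
)
sas_blob = BlobClient.from_blob_url(snapshot_blob.url, credential=sas_token)

dest_blob = bsc.get_blob_client("my-container", "dest_blob")
copy = dest_blob.start_copy_from_url(sas_blob.url, incremental_copy=True)
assert copy["copy_status"] == "pending"  # poll get_blob_properties() until "success"
# ------------------------------------------------------------------------------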
+2255,18 @@ async def test_blob_tier_set_tier_api(self, **kwargs): premium_storage_account_name = kwargs.pop("premium_storage_account_name") premium_storage_account_key = kwargs.pop("premium_storage_account_key") - bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(premium_storage_account_name, "blob"), + credential=premium_storage_account_key, + max_page_size=4 * 1024, + ) await self._setup(bsc) url = self.account_url(premium_storage_account_name, "blob") credential = premium_storage_account_key pbs = BlobServiceClient(url, credential=credential) try: - container_name = self.get_resource_name('utpremiumcontainer') + container_name = self.get_resource_name("utpremiumcontainer") container = pbs.get_container_client(container_name) if self.is_live: @@ -2134,14 +2320,18 @@ async def test_blob_tier_copy_blob(self, **kwargs): premium_storage_account_name = kwargs.pop("premium_storage_account_name") premium_storage_account_key = kwargs.pop("premium_storage_account_key") - bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(premium_storage_account_name, "blob"), + credential=premium_storage_account_key, + max_page_size=4 * 1024, + ) await self._setup(bsc) url = self.account_url(premium_storage_account_name, "blob") credential = premium_storage_account_key pbs = BlobServiceClient(url, credential=credential) try: - container_name = self.get_resource_name('utpremiumcontainer') + container_name = self.get_resource_name("utpremiumcontainer") container = pbs.get_container_client(container_name) if self.is_live: @@ -2150,50 +2340,56 @@ async def test_blob_tier_copy_blob(self, **kwargs): except ResourceExistsError: pass - bsc = BlobServiceClient(self.account_url(premium_storage_account_name, "blob"), credential=premium_storage_account_key, max_page_size=4 * 1024) - source_blob = pbs.get_blob_client( - container_name, - self.get_resource_name(TEST_BLOB_PREFIX)) + bsc = BlobServiceClient( + self.account_url(premium_storage_account_name, "blob"), + credential=premium_storage_account_key, + max_page_size=4 * 1024, + ) + source_blob = pbs.get_blob_client(container_name, self.get_resource_name(TEST_BLOB_PREFIX)) await source_blob.create_page_blob(1024, premium_page_blob_tier=PremiumPageBlobTier.P10) # Act - source_blob_url = '{0}/{1}/{2}'.format( - self.account_url(premium_storage_account_name, "blob"), container_name, source_blob.blob_name) + source_blob_url = "{0}/{1}/{2}".format( + self.account_url(premium_storage_account_name, "blob"), container_name, source_blob.blob_name + ) - copy_blob = pbs.get_blob_client(container_name, 'blob1copy') + copy_blob = pbs.get_blob_client(container_name, "blob1copy") copy = await copy_blob.start_copy_from_url(source_blob_url, premium_page_blob_tier=PremiumPageBlobTier.P30) # Assert assert copy is not None - assert copy['copy_status'] == 'success' - assert copy['copy_id'] is not None + assert copy["copy_status"] == "success" + assert copy["copy_id"] is not None copy_ref = await copy_blob.get_blob_properties() assert copy_ref.blob_tier == PremiumPageBlobTier.P30 - source_blob2 = pbs.get_blob_client( - container_name, - self.get_resource_name(TEST_BLOB_PREFIX)) + source_blob2 = pbs.get_blob_client(container_name, self.get_resource_name(TEST_BLOB_PREFIX)) await source_blob2.create_page_blob(1024) - 
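# ------------------------------------------------------------------------------
# Premium page-blob tiers exist only on premium storage accounts. A minimal
# sketch of setting a tier at creation time, mirroring the tests above;
# PREMIUM_URL/PREMIUM_KEY are placeholders.
import os
from azure.storage.blob import BlobServiceClient, PremiumPageBlobTier

pbs = BlobServiceClient(os.environ["PREMIUM_URL"], credential=os.environ["PREMIUM_KEY"])
pblob = pbs.get_blob_client("utpremiumcontainer", "tiered-page-blob")
pblob.create_page_blob(1024, premium_page_blob_tier=PremiumPageBlobTier.P10)

props = pblob.get_blob_properties()
assert props.blob_tier == PremiumPageBlobTier.P10
assert not props.blob_tier_inferred  # the tier was set explicitly, not inferred
# ------------------------------------------------------------------------------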
source_blob2_url = '{0}/{1}/{2}'.format( - self.account_url(premium_storage_account_name, "blob"), source_blob2.container_name, source_blob2.blob_name) - - copy_blob2 = pbs.get_blob_client(container_name, 'blob2copy') - copy2 = await copy_blob2.start_copy_from_url(source_blob2_url, premium_page_blob_tier=PremiumPageBlobTier.P60) + source_blob2_url = "{0}/{1}/{2}".format( + self.account_url(premium_storage_account_name, "blob"), + source_blob2.container_name, + source_blob2.blob_name, + ) + + copy_blob2 = pbs.get_blob_client(container_name, "blob2copy") + copy2 = await copy_blob2.start_copy_from_url( + source_blob2_url, premium_page_blob_tier=PremiumPageBlobTier.P60 + ) assert copy2 is not None - assert copy2['copy_status'] == 'success' - assert copy2['copy_id'] is not None + assert copy2["copy_status"] == "success" + assert copy2["copy_id"] is not None copy_ref2 = await copy_blob2.get_blob_properties() assert copy_ref2.blob_tier == PremiumPageBlobTier.P60 assert not copy_ref2.blob_tier_inferred - copy_blob3 = pbs.get_blob_client(container_name, 'blob3copy') + copy_blob3 = pbs.get_blob_client(container_name, "blob3copy") copy3 = await copy_blob3.start_copy_from_url(source_blob2_url) assert copy3 is not None - assert copy3['copy_status'] == 'success' - assert copy3['copy_id'] is not None + assert copy3["copy_status"] == "success" + assert copy3["copy_id"] is not None copy_ref3 = await copy_blob3.get_blob_properties() assert copy_ref3.blob_tier == PremiumPageBlobTier.P10 @@ -2205,9 +2401,11 @@ async def test_blob_tier_copy_blob(self, **kwargs): @recorded_by_proxy_async async def test_download_sparse_page_blob(self, storage_account_name, storage_account_key): # Arrange - bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024) + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), credential=storage_account_key, max_page_size=4 * 1024 + ) await self._setup(bsc) - self.config.max_single_get_size = 4*1024 + self.config.max_single_get_size = 4 * 1024 self.config.max_chunk_get_size = 1024 sparse_page_blob_size = 1024 * 1024 @@ -2216,8 +2414,8 @@ async def test_download_sparse_page_blob(self, storage_account_name, storage_acc # Act page_ranges, cleared = await blob_client.get_page_ranges() - start = page_ranges[0]['start'] - end = page_ranges[0]['end'] + start = page_ranges[0]["start"] + end = page_ranges[0]["end"] content = await blob_client.download_blob() content = await content.readall() @@ -2225,16 +2423,16 @@ async def test_download_sparse_page_blob(self, storage_account_name, storage_acc # Assert assert sparse_page_blob_size == len(content) # make sure downloaded data is the same as the uploaded data - assert data == content[start: end + 1] + assert data == content[start : end + 1] # assert all unlisted ranges are empty - for byte in content[:start-1]: + for byte in content[: start - 1]: try: - assert byte == '\x00' + assert byte == "\x00" except: assert byte == 0 - for byte in content[end+1:]: + for byte in content[end + 1 :]: try: - assert byte == '\x00' + assert byte == "\x00" except: assert byte == 0 @@ -2256,7 +2454,7 @@ async def test_download_sparse_page_blob_uneven_chunks(self, **kwargs): blob_client = self._get_blob_reference(bsc) await blob_client.create_page_blob(sparse_page_blob_size) - data = b'12345678' * 128 # 1024 bytes + data = b"12345678" * 128 # 1024 bytes range_start = 2 * 1024 + 512 await blob_client.upload_page(data, offset=range_start, length=len(data)) @@ -2271,10 +2469,10 @@ 
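# ------------------------------------------------------------------------------
# Downloading a sparse page blob returns zeros for never-written pages. A
# sketch of the check the tests perform; note the tests slice
# content[: start - 1], which leaves the byte at start - 1 unchecked, while
# content[:start] covers the whole prefix. `bsc` as in the earlier sketches.
blob = bsc.get_blob_client("my-container", "sparse-download-blob")
blob.create_page_blob(1024 * 1024)
payload = b"12345678" * 128  # 1 KiB; offset and length are 512-byte aligned
blob.upload_page(payload, offset=2 * 1024 + 512, length=len(payload))

content = blob.download_blob().readall()
page_ranges, _ = blob.get_page_ranges()
start, end = page_ranges[0]["start"], page_ranges[0]["end"]
assert content[start : end + 1] == payload
assert all(b == 0 for b in content[:start]) and all(b == 0 for b in content[end + 1 :])
# ------------------------------------------------------------------------------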
async def test_download_sparse_page_blob_uneven_chunks(self, **kwargs): start = r.start end = r.end - assert data == content[start: end + 1] - for byte in content[:start - 1]: + assert data == content[start : end + 1] + for byte in content[: start - 1]: assert byte == 0 - for byte in content[end + 1:]: + for byte in content[end + 1 :]: assert byte == 0 @BlobPreparer() @@ -2287,23 +2485,23 @@ async def test_upload_progress_chunked_non_parallel(self, **kwargs): await self._setup(bsc) blob_name = self.get_resource_name(TEST_BLOB_PREFIX) - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_page_size=1024) + max_single_put_size=1024, + max_page_size=1024, + ) await blob_client.upload_blob( - data, - blob_type=BlobType.PageBlob, - overwrite=True, - max_concurrency=1, - progress_hook=progress.assert_progress) + data, blob_type=BlobType.PageBlob, overwrite=True, max_concurrency=1, progress_hook=progress.assert_progress + ) # Assert progress.assert_complete() @@ -2319,25 +2517,26 @@ async def test_upload_progress_chunked_parallel(self, **kwargs): await self._setup(bsc) blob_name = self.get_resource_name(TEST_BLOB_PREFIX) - data = b'a' * 5 * 1024 + data = b"a" * 5 * 1024 progress = ProgressTracker(len(data), 1024) # Act blob_client = BlobClient( - self.account_url(storage_account_name, 'blob'), - self.container_name, blob_name, + self.account_url(storage_account_name, "blob"), + self.container_name, + blob_name, credential=storage_account_key, - max_single_put_size=1024, max_page_size=1024) + max_single_put_size=1024, + max_page_size=1024, + ) await blob_client.upload_blob( - data, - blob_type=BlobType.PageBlob, - overwrite=True, - max_concurrency=3, - progress_hook=progress.assert_progress) + data, blob_type=BlobType.PageBlob, overwrite=True, max_concurrency=3, progress_hook=progress.assert_progress + ) # Assert progress.assert_complete() -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_quick_query.py b/sdk/storage/azure-storage-blob/tests/test_quick_query.py index b16b730d0956..26586ca985e4 100644 --- a/sdk/storage/azure-storage-blob/tests/test_quick_query.py +++ b/sdk/storage/azure-storage-blob/tests/test_quick_query.py @@ -11,71 +11,71 @@ from devtools_testutils import recorded_by_proxy from settings.testcase import BlobPreparer from devtools_testutils.storage import StorageRecordedTestCase -from azure.storage.blob import ( - BlobServiceClient, - DelimitedJsonDialect, - DelimitedTextDialect -) +from azure.storage.blob import BlobServiceClient, DelimitedJsonDialect, DelimitedTextDialect # ------------------------------------------------------------------------------ from azure.storage.blob._models import ArrowDialect, ArrowType, QuickQueryDialect -CSV_DATA = b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' \ - b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' \ - b'\r\nEvent Hubs - Azure Storage CheckpointStore,' \ - b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' \ - b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - 
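# ------------------------------------------------------------------------------
# progress_hook is the public counterpart to the raw-response parsing above:
# the SDK invokes it with (bytes_transferred, total_bytes) as each chunk
# commits. Chunking is forced here with small put/page sizes, as in the
# ProgressTracker tests; account values are placeholders.
import os
from azure.storage.blob import BlobClient, BlobType

blob_client = BlobClient(
    os.environ["ACCOUNT_URL"],
    "my-container",
    "progress-hook-blob",
    credential=os.environ["ACCOUNT_KEY"],
    max_single_put_size=1024,
    max_page_size=1024,
)

def on_progress(current, total):
    print(f"{current}/{total} bytes uploaded")  # roughly once per 1 KiB page

blob_client.upload_blob(
    b"a" * 5 * 1024, blob_type=BlobType.PageBlob, overwrite=True, max_concurrency=1, progress_hook=on_progress
)
# ------------------------------------------------------------------------------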
Certificates,azure-security-keyvault-certificates,' \ - b'4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' \ - b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' \ - b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' \ - b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' \ - b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' \ - b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' \ - b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' \ - b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' \ - b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' \ - b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' \ - b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' \ - b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' \ - b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' \ - b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' \ - b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' \ - b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' \ - b'Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' \ - b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' \ - b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' \ - b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' \ - b'Service,Package,Version,RepoPath,MissingDocs\r\n' \ - b'App Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' - -CONVERTED_CSV_DATA = b"Service;Package;Version;RepoPath;MissingDocs.App Configuration;azure-data-appconfiguration;" \ - b"1;appconfiguration;FALSE.Event Hubs.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ - b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;FALSE.Key Vault - Certificates;" \ - b"azure-security-keyvault-certificates;'4.0.0';keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;" \ - b"'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;azure-security-keyvault-secrets;'4.1.0';keyvault;" \ - b"FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';storage;FALSE.Storage - Blobs Batch;" \ - b"azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ - b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ - b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ - b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ - b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ - b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ - b"'5.0.1';eventhubs;FALSE.Event Hubs - 
Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ - b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;" \ - b"FALSE.Key Vault - Certificates;azure-security-keyvault-certificates;'4.0.0';" \ - b"keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;" \ - b"azure-security-keyvault-secrets;'4.1.0';keyvault;FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';" \ - b"storage;FALSE.Storage - Blobs Batch;azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ - b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ - b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ - b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ - b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ - b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ - b"'5.0.1';eventhubs;FALSE." +CSV_DATA = ( + b"Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration," + b"azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs" + b"\r\nEvent Hubs - Azure Storage CheckpointStore," + b"azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity," + b"1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates," + b"4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault," + b"FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n" + b"Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch," + b"azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography," + b"azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares," + b"azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues," + b"azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics," + b"azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing," + b"azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath," + b"MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n" + b"Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n" + b"Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob," + b"1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n" + b"Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n" + b"Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n" + b"Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n" + b"Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n" + b"Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n" + b"Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n" + b"Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n" + b"Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n" + b"Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n" + b"Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n" + b"Service,Package,Version,RepoPath,MissingDocs\r\n" + b"App 
Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n" + b"Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n" +) + +CONVERTED_CSV_DATA = ( + b"Service;Package;Version;RepoPath;MissingDocs.App Configuration;azure-data-appconfiguration;" + b"1;appconfiguration;FALSE.Event Hubs.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" + b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;FALSE.Key Vault - Certificates;" + b"azure-security-keyvault-certificates;'4.0.0';keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;" + b"'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;azure-security-keyvault-secrets;'4.1.0';keyvault;" + b"FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';storage;FALSE.Storage - Blobs Batch;" + b"azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" + b"'5.0.1';eventhubs;FALSE.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" + b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;" + b"FALSE.Key Vault - Certificates;azure-security-keyvault-certificates;'4.0.0';" + b"keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;" + b"azure-security-keyvault-secrets;'4.1.0';keyvault;FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';" + b"storage;FALSE.Storage - Blobs Batch;azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" + b"'5.0.1';eventhubs;FALSE." 
+) # ------------------------------------------------------------------------------ @@ -83,7 +83,7 @@ class TestStorageQuickQuery(StorageRecordedTestCase): def _setup(self, bsc): self.config = bsc._config - self.container_name = self.get_resource_name('utqqcontainer') + self.container_name = self.get_resource_name("utqqcontainer") if self.is_live: try: @@ -112,9 +112,7 @@ def test_quick_query_readall(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -133,7 +131,7 @@ def on_error(error): assert len(errors) == 0 assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'\n') + assert data, CSV_DATA.replace(b"\r\n" == b"\n") self._teardown(bsc) @BlobPreparer() @@ -143,9 +141,7 @@ def test_quick_query_iter_records(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -158,14 +154,14 @@ def test_quick_query_iter_records(self, **kwargs): # Assert first line has header data = next(read_records) - assert data == b'Service,Package,Version,RepoPath,MissingDocs' + assert data == b"Service,Package,Version,RepoPath,MissingDocs" for record in read_records: data += record assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data, CSV_DATA.replace(b"\r\n" == b"") self._teardown(bsc) @BlobPreparer() @@ -175,9 +171,7 @@ def test_quick_query_readall_with_encoding(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -190,13 +184,13 @@ def test_quick_query_readall_with_encoding(self, **kwargs): def on_error(error): errors.append(error) - reader = blob_client.query_blob("SELECT * from BlobStorage", on_error=on_error, encoding='utf-8') + reader = blob_client.query_blob("SELECT * from BlobStorage", on_error=on_error, encoding="utf-8") data = reader.readall() assert len(errors) == 0 assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'\n').decode('utf-8') + assert data, CSV_DATA.replace(b"\r\n" == b"\n").decode("utf-8") self._teardown(bsc) @BlobPreparer() @@ -206,9 +200,7 @@ def test_quick_query_iter_records_with_encoding(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -216,14 +208,14 @@ def test_quick_query_iter_records_with_encoding(self, **kwargs): blob_client = bsc.get_blob_client(self.container_name, blob_name) 
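# ------------------------------------------------------------------------------
# Style note on the constants above: the reformat replaces backslash
# continuations with adjacent literals inside parentheses, which the parser
# concatenates at compile time, so the constants' values are unchanged:
#     GREETING = (
#         b"hello, "
#         b"world"
#     )
#     assert GREETING == b"hello, world"
#
# Below, a minimal quick-query sketch matching test_quick_query_readall;
# `bsc` is a BlobServiceClient as in the earlier sketches. Note that the
# test's
#     assert data, CSV_DATA.replace(b"\r\n" == b"\n")
# is a two-argument assert: it only checks that `data` is non-empty, and the
# "message" expression passes a boolean to replace(). The comparison that was
# presumably intended (left untouched by this formatting-only diff) is
#     assert data == CSV_DATA.replace(b"\r\n", b"\n")
errors = []
blob_client = bsc.get_blob_client("utqqcontainer", "csvblob")
blob_client.upload_blob(CSV_DATA, overwrite=True)

reader = blob_client.query_blob("SELECT * from BlobStorage", on_error=errors.append)
data = reader.readall()
assert not errors and len(reader) == len(CSV_DATA)
# ------------------------------------------------------------------------------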
blob_client.upload_blob(CSV_DATA, overwrite=True) - reader = blob_client.query_blob("SELECT * from BlobStorage", encoding='utf-8') - data = '' + reader = blob_client.query_blob("SELECT * from BlobStorage", encoding="utf-8") + data = "" for record in reader.records(): data += record assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'').decode('utf-8') + assert data, CSV_DATA.replace(b"\r\n" == b"").decode("utf-8") self._teardown(bsc) @BlobPreparer() @@ -233,9 +225,7 @@ def test_quick_query_iter_output_records_excluding_headers(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -245,19 +235,21 @@ def test_quick_query_iter_output_records_excluding_headers(self, **kwargs): input_format = DelimitedTextDialect(has_header=True) output_format = DelimitedTextDialect(has_header=False) - reader = blob_client.query_blob("SELECT * from BlobStorage", blob_format=input_format, output_format=output_format) + reader = blob_client.query_blob( + "SELECT * from BlobStorage", blob_format=input_format, output_format=output_format + ) read_records = reader.records() # Assert first line does not include header data = next(read_records) - assert data == b'App Configuration,azure-data-appconfiguration,1,appconfiguration,FALSE' + assert data == b"App Configuration,azure-data-appconfiguration,1,appconfiguration,FALSE" for record in read_records: data += record assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'')[44:] + assert data, CSV_DATA.replace(b"\r\n" == b"")[44:] self._teardown(bsc) @BlobPreparer() @@ -267,9 +259,7 @@ def test_quick_query_iter_output_records_including_headers(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -283,14 +273,14 @@ def test_quick_query_iter_output_records_including_headers(self, **kwargs): # Assert first line does not include header data = next(read_records) - assert data == b'Service,Package,Version,RepoPath,MissingDocs' + assert data == b"Service,Package,Version,RepoPath,MissingDocs" for record in read_records: data += record assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data, CSV_DATA.replace(b"\r\n" == b"") self._teardown(bsc) @BlobPreparer() @@ -300,9 +290,7 @@ def test_quick_query_iter_records_with_progress(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -311,7 +299,7 @@ def test_quick_query_iter_records_with_progress(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) reader = blob_client.query_blob("SELECT * from BlobStorage") - data = b'' + data = b"" 
progress = 0 for record in reader.records(): if record: @@ -319,7 +307,7 @@ def test_quick_query_iter_records_with_progress(self, **kwargs): progress += len(record) + 2 assert len(reader) == len(CSV_DATA) assert reader._size == reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data, CSV_DATA.replace(b"\r\n" == b"") assert progress == reader._size self._teardown(bsc) @@ -330,9 +318,7 @@ def test_quick_query_readall_with_serialization_setting(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -346,23 +332,12 @@ def on_error(error): errors.append(error) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=False - ) - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=False ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) query_result = resp.readall() assert len(errors) == 0 @@ -377,9 +352,7 @@ def test_quick_query_iter_records_with_serialization_setting(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -388,23 +361,13 @@ def test_quick_query_iter_records_with_serialization_setting(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=False - ) - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='%', - escapechar='\\' + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=False ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator="%", escapechar="\\") reader = blob_client.query_blob( - "SELECT * from BlobStorage", - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", blob_format=input_format, output_format=output_format + ) data = [] for record in reader.records(): if record: @@ -422,19 +385,19 @@ def test_quick_query_readall_with_fatal_error_handler(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' 
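# ------------------------------------------------------------------------------
# The serialization-setting tests reshape CSV on the service side: the input
# dialect describes the stored bytes, the output dialect the bytes returned.
# Minimal sketch with the same dialects; `blob_client` holds CSV_DATA as in
# the sketch after test_quick_query_readall above.
from azure.storage.blob import DelimitedTextDialect

input_format = DelimitedTextDialect(delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=False)
output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\")
resp = blob_client.query_blob("SELECT * from BlobStorage", blob_format=input_format, output_format=output_format)
print(resp.readall())  # rows re-emitted with ';' field separators and '.' record terminators
# ------------------------------------------------------------------------------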
\ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data3 = ( + b"{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:" + b"{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3," + b"shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:" + b"{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z," + b"data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}" + ) + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -447,22 +410,15 @@ def on_error(error): errors.append(error) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) query_result = resp.readall() assert len(errors) == 1 assert resp._size == 43 - assert query_result == b'' + assert query_result == b"" self._teardown(bsc) @BlobPreparer() @@ -472,19 +428,19 @@ def test_quick_query_iter_records_with_fatal_error_handler(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data3 = ( + b"{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:" + b"{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3," + b"shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:" + b"{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z," + b"data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}" + ) + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -497,24 +453,17 @@ def on_error(error): errors.append(error) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - 
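# ------------------------------------------------------------------------------
# With a JSON input dialect over malformed records, parse failures are
# delivered to on_error rather than raised, and readall() returns only what
# could be parsed. Sketch mirroring the fatal-error tests; `blob_client`
# holds the newline-delimited pseudo-JSON uploaded above.
from azure.storage.blob import DelimitedJsonDialect, DelimitedTextDialect

errors = []
resp = blob_client.query_blob(
    "SELECT * from BlobStorage",
    on_error=errors.append,
    blob_format=DelimitedJsonDialect(),
    output_format=DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\"),
)
result = resp.readall()
if errors:
    print(errors[0].description)  # BlobQueryError carries a name, description, and position
# ------------------------------------------------------------------------------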
blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) data = [] for record in resp.records(): data.append(record) assert len(errors) == 1 assert resp._size == 43 - assert data == [b''] + assert data == [b""] self._teardown(bsc) @BlobPreparer() @@ -524,19 +473,19 @@ def test_quick_query_readall_with_fatal_error_handler_raise(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data3 = ( + b"{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:" + b"{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3," + b"shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:" + b"{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z," + b"data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}" + ) + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -549,17 +498,10 @@ def on_error(error): raise Exception(error.description) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) with pytest.raises(Exception): query_result = resp.readall() self._teardown(bsc) @@ -571,19 +513,19 @@ def test_quick_query_iter_records_with_fatal_error_handler_raise(self, **kwargs) storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data3 = 
( + b"{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:" + b"{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3," + b"shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:" + b"{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z," + b"data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}" + ) + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -596,17 +538,10 @@ def on_error(error): raise Exception(error.description) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) with pytest.raises(Exception): for record in resp.records(): @@ -620,14 +555,12 @@ def test_quick_query_readall_with_fatal_error_ignore(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -635,16 +568,10 @@ def test_quick_query_readall_with_fatal_error_ignore(self, **kwargs): blob_client.upload_blob(data, overwrite=True) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", blob_format=input_format, output_format=output_format + ) query_result = resp.readall() self._teardown(bsc) @@ -655,19 +582,19 @@ def test_quick_query_iter_records_with_fatal_error_ignore(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{name: owner}' - data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b"{name: owner}" + data2 = b"{name2: owner2}" + data3 = ( + 
b"{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:" + b"{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3," + b"shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:" + b"{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z," + b"data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}" + ) + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -675,16 +602,10 @@ def test_quick_query_iter_records_with_fatal_error_ignore(self, **kwargs): blob_client.upload_blob(data, overwrite=True) input_format = DelimitedJsonDialect() - output_format = DelimitedTextDialect( - delimiter=';', - quotechar="'", - lineterminator='.', - escapechar='\\' - ) + output_format = DelimitedTextDialect(delimiter=";", quotechar="'", lineterminator=".", escapechar="\\") resp = blob_client.query_blob( - "SELECT * from BlobStorage", - blob_format=input_format, - output_format=output_format) + "SELECT * from BlobStorage", blob_format=input_format, output_format=output_format + ) for record in resp.records(): print(record) @@ -697,9 +618,7 @@ def test_quick_query_readall_with_nonfatal_error_handler(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -708,27 +627,22 @@ def test_quick_query_readall_with_nonfatal_error_handler(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) errors = [] + def on_error(error): errors.append(error) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=True + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=True ) output_format = DelimitedTextDialect( - delimiter=';', + delimiter=";", quotechar="'", - lineterminator='.', - escapechar='\\', + lineterminator=".", + escapechar="\\", ) resp = blob_client.query_blob( - "SELECT RepoPath from BlobStorage", - blob_format=input_format, - output_format=output_format, - on_error=on_error) + "SELECT RepoPath from BlobStorage", blob_format=input_format, output_format=output_format, on_error=on_error + ) query_result = resp.readall() # the error is because that line only has one column @@ -744,9 +658,7 @@ def test_quick_query_iter_records_with_nonfatal_error_handler(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -755,27 +667,22 @@ def test_quick_query_iter_records_with_nonfatal_error_handler(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) errors = [] + def on_error(error): errors.append(error) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=True + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=True ) output_format = DelimitedTextDialect( - delimiter=';', + delimiter=";", quotechar="'", - lineterminator='%', - escapechar='\\', + lineterminator="%", + escapechar="\\", ) resp 
= blob_client.query_blob( - "SELECT RepoPath from BlobStorage", - blob_format=input_format, - output_format=output_format, - on_error=on_error) + "SELECT RepoPath from BlobStorage", blob_format=input_format, output_format=output_format, on_error=on_error + ) data = list(resp.records()) # the error is because that line only has one column @@ -791,9 +698,7 @@ def test_quick_query_readall_with_nonfatal_error_ignore(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -802,22 +707,17 @@ def test_quick_query_readall_with_nonfatal_error_ignore(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=True + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=True ) output_format = DelimitedTextDialect( - delimiter=';', + delimiter=";", quotechar="'", - lineterminator='.', - escapechar='\\', + lineterminator=".", + escapechar="\\", ) resp = blob_client.query_blob( - "SELECT RepoPath from BlobStorage", - blob_format=input_format, - output_format=output_format) + "SELECT RepoPath from BlobStorage", blob_format=input_format, output_format=output_format + ) query_result = resp.readall() assert resp._size == len(CSV_DATA) assert len(query_result) > 0 @@ -830,9 +730,7 @@ def test_quick_query_iter_records_with_nonfatal_error_ignore(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the csv file @@ -841,22 +739,17 @@ def test_quick_query_iter_records_with_nonfatal_error_ignore(self, **kwargs): blob_client.upload_blob(CSV_DATA, overwrite=True) input_format = DelimitedTextDialect( - delimiter=',', - quotechar='"', - lineterminator='\n', - escapechar='', - has_header=True + delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=True ) output_format = DelimitedTextDialect( - delimiter=';', + delimiter=";", quotechar="'", - lineterminator='$', - escapechar='\\', + lineterminator="$", + escapechar="\\", ) resp = blob_client.query_blob( - "SELECT RepoPath from BlobStorage", - blob_format=input_format, - output_format=output_format) + "SELECT RepoPath from BlobStorage", blob_format=input_format, output_format=output_format + ) data = list(resp.records()) assert resp._size == len(CSV_DATA) assert len(data) == 32 @@ -869,14 +762,12 @@ def test_quick_query_readall_with_json_serialization_setting(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{\"name\": \"owner\", \"id\": 1}' - data2 = b'{\"name2\": \"owner2\"}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b'{"name": "owner", "id": 1}' + data2 = b'{"name2": "owner2"}' + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = 
self._get_blob_reference() @@ -884,17 +775,16 @@ def test_quick_query_readall_with_json_serialization_setting(self, **kwargs): blob_client.upload_blob(data, overwrite=True) errors = [] + def on_error(error): errors.append(error) - input_format = DelimitedJsonDialect(delimiter='\n') - output_format = DelimitedJsonDialect(delimiter=';') + input_format = DelimitedJsonDialect(delimiter="\n") + output_format = DelimitedJsonDialect(delimiter=";") resp = blob_client.query_blob( - "SELECT name from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT name from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) query_result = resp.readall() assert len(errors) == 0 @@ -909,14 +799,12 @@ def test_quick_query_iter_records_with_json_serialization_setting(self, **kwargs storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{\"name\": \"owner\", \"id\": 1}' - data2 = b'{\"name2\": \"owner2\"}' - data = data1 + b'\n' + data2 + b'\n' + data1 + data1 = b'{"name": "owner", "id": 1}' + data2 = b'{"name2": "owner2"}' + data = data1 + b"\n" + data2 + b"\n" + data1 # upload the json file blob_name = self._get_blob_reference() @@ -924,22 +812,21 @@ def test_quick_query_iter_records_with_json_serialization_setting(self, **kwargs blob_client.upload_blob(data, overwrite=True) errors = [] + def on_error(error): errors.append(error) - input_format = DelimitedJsonDialect(delimiter='\n') - output_format = DelimitedJsonDialect(delimiter=';') + input_format = DelimitedJsonDialect(delimiter="\n") + output_format = DelimitedJsonDialect(delimiter=";") resp = blob_client.query_blob( - "SELECT name from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT name from BlobStorage", on_error=on_error, blob_format=input_format, output_format=output_format + ) listdata = list(resp.records()) assert len(errors) == 0 assert resp._size == len(data) - assert listdata, [b'{"name":"owner"}',b'{}',b'{"name":"owner"}' == b''] + assert listdata, [b'{"name":"owner"}', b"{}", b'{"name":"owner"}' == b""] self._teardown(bsc) @BlobPreparer() @@ -949,13 +836,11 @@ def test_quick_query_with_only_input_json_serialization_setting(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data1 = b'{\"name\": \"owner\", \"id\": 1}' - data2 = b'{\"name2\": \"owner2\"}' + data1 = b'{"name": "owner", "id": 1}' + data2 = b'{"name2": "owner2"}' data = data1 + data2 + data1 # upload the json file @@ -964,17 +849,16 @@ def test_quick_query_with_only_input_json_serialization_setting(self, **kwargs): blob_client.upload_blob(data, overwrite=True) errors = [] + def on_error(error): errors.append(error) - input_format = DelimitedJsonDialect(delimiter='\n') + input_format = DelimitedJsonDialect(delimiter="\n") output_format = None resp = blob_client.query_blob( - "SELECT name from BlobStorage", - on_error=on_error, - blob_format=input_format, - output_format=output_format) + "SELECT name from BlobStorage", 
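# ------------------------------------------------------------------------------
# JSON-to-JSON quick query: both dialects are DelimitedJsonDialect, differing
# only in record delimiter; rows lacking the selected field come back as
# b'{}' per the test's expectation. `blob_client` as in the sketches above.
from azure.storage.blob import DelimitedJsonDialect

data = b'{"name": "owner", "id": 1}\n{"name2": "owner2"}\n{"name": "owner", "id": 1}'
blob_client.upload_blob(data, overwrite=True)

resp = blob_client.query_blob(
    "SELECT name from BlobStorage",
    blob_format=DelimitedJsonDialect(delimiter="\n"),
    output_format=DelimitedJsonDialect(delimiter=";"),
)
for record in resp.records():
    print(record)  # e.g. b'{"name":"owner"}', b'{}', b'{"name":"owner"}'
# ------------------------------------------------------------------------------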
on_error=on_error, blob_format=input_format, output_format=output_format + ) query_result = resp.readall() assert len(errors) == 0 @@ -989,12 +873,10 @@ def test_quick_query_output_in_arrow_format(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) - data = b'100,200,300,400\n300,400,500,600\n' + data = b"100,200,300,400\n300,400,500,600\n" # upload the json file blob_name = self._get_blob_reference() @@ -1002,15 +884,15 @@ def test_quick_query_output_in_arrow_format(self, **kwargs): blob_client.upload_blob(data, overwrite=True) errors = [] + def on_error(error): errors.append(error) output_format = [ArrowDialect(ArrowType.DECIMAL, name="abc", precision=4, scale=2)] resp = blob_client.query_blob( - "SELECT _2 from BlobStorage WHERE _1 > 250", - on_error=on_error, - output_format=output_format) + "SELECT _2 from BlobStorage WHERE _1 > 250", on_error=on_error, output_format=output_format + ) query_result = base64.b64encode(resp.readall()) # expected_result = b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAUAAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQACAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAAAAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAP////+IAAAAFAAAAAAAAAAMABYABgAFAAgADAAMAAAAAAMEABgAAAAQAAAAAAAAAAAACgAYAAwABAAIAAoAAAA8AAAAEAAAAAEAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAJABAAAAAAAAAAAAAAAAAAA=' @@ -1026,9 +908,7 @@ def test_quick_query_input_in_arrow_format(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) # upload the json file @@ -1036,16 +916,14 @@ def test_quick_query_input_in_arrow_format(self, **kwargs): blob_client = bsc.get_blob_client(self.container_name, blob_name) errors = [] + def on_error(error): errors.append(error) input_format = [ArrowDialect(ArrowType.DECIMAL, name="abc", precision=4, scale=2)] with pytest.raises(ValueError): - blob_client.query_blob( - "SELECT * from BlobStorage", - on_error=on_error, - blob_format=input_format) + blob_client.query_blob("SELECT * from BlobStorage", on_error=on_error, blob_format=input_format) @BlobPreparer() @recorded_by_proxy @@ -1054,9 +932,7 @@ def test_quick_query_input_in_parquet_format(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) expression = "select * from blobstorage where id < 1;" expected_data = b"0,mdifjt55.ea3,mdifjt55.ea3\n" @@ -1079,9 +955,7 @@ def test_quick_query_output_in_parquet_format(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key) + bsc = 
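# ------------------------------------------------------------------------------
# Arrow is output-only for quick query: passing ArrowDialect as blob_format
# raises ValueError (as the input-format test asserts), while as
# output_format the service returns an Arrow IPC stream. Sketch mirroring
# the output test; `blob_client` as above.
from azure.storage.blob import ArrowDialect, ArrowType

blob_client.upload_blob(b"100,200,300,400\n300,400,500,600\n", overwrite=True)
output_format = [ArrowDialect(ArrowType.DECIMAL, name="abc", precision=4, scale=2)]
resp = blob_client.query_blob("SELECT _2 from BlobStorage WHERE _1 > 250", output_format=output_format)
arrow_bytes = resp.readall()  # raw Arrow stream; decode with pyarrow.ipc if needed
# ------------------------------------------------------------------------------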
BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key) self._setup(bsc) expression = "SELECT * from BlobStorage" @@ -1092,7 +966,7 @@ def test_quick_query_output_in_parquet_format(self, **kwargs): blob_client.upload_blob(parquet_data, overwrite=True) with pytest.raises(ValueError): - blob_client.query_blob( - expression, blob_format="ParquetDialect", output_format="ParquetDialect") + blob_client.query_blob(expression, blob_format="ParquetDialect", output_format="ParquetDialect") + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_retry.py b/sdk/storage/azure-storage-blob/tests/test_retry.py index debf234e651b..4abc8f64542b 100644 --- a/sdk/storage/azure-storage-blob/tests/test_retry.py +++ b/sdk/storage/azure-storage-blob/tests/test_retry.py @@ -12,17 +12,11 @@ ClientAuthenticationError, HttpResponseError, ResourceExistsError, - ServiceResponseError + ServiceResponseError, ) from azure.core.pipeline.transport import RequestsTransport from azure.storage.blob._shared.authentication import AzureSigningError -from azure.storage.blob import ( - BlobClient, - BlobServiceClient, - ExponentialRetry, - LinearRetry, - LocationMode -) +from azure.storage.blob import BlobClient, BlobServiceClient, ExponentialRetry, LinearRetry, LocationMode from requests import Response from requests.exceptions import ContentDecodingError, ChunkedEncodingError, ReadTimeout @@ -33,6 +27,7 @@ class TimeoutRequestsTransport(RequestsTransport): """Transport to test read timeout""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.count = 0 @@ -61,7 +56,7 @@ def test_retry_on_server_error(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key) # Force the create call to 'timeout' with a 408 @@ -85,10 +80,11 @@ def test_retry_on_timeout(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") retry = ExponentialRetry(initial_backoff=1, increment_base=2) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry) + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry + ) callback = ResponseCallback(status=201, new_status=408).override_status @@ -110,17 +106,18 @@ def test_retry_callback_and_retry_context(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") retry = LinearRetry(backoff=1) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry) + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry + ) # Force the create call to 'timeout' with a 408 callback = ResponseCallback(status=201, new_status=408).override_status def assert_exception_is_present_on_retry_context(**kwargs): - assert kwargs.get('response') is not None - assert kwargs['response'].status_code == 408 + assert kwargs.get("response") is not None + assert kwargs["response"].status_code == 408 # Act try: @@ 
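All of the quick-query tests above exercise a single call shape on `BlobClient.query_blob`: choose an input dialect, optionally an output dialect, then drain the returned reader. Outside the test harness that pattern looks roughly like the sketch below (not part of the patch; the account URL, credential, container, and blob names are placeholders):

```python
from azure.storage.blob import BlobServiceClient, DelimitedJsonDialect

# Placeholders -- substitute a real account URL, credential, container, and blob.
bsc = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")
blob_client = bsc.get_blob_client("<container>", "<blob>")

errors = []

def on_error(error):
    # Collect non-fatal, per-record query errors instead of raising.
    errors.append(error)

# Parse the blob as newline-delimited JSON and emit ";"-delimited JSON results.
reader = blob_client.query_blob(
    "SELECT name from BlobStorage",
    on_error=on_error,
    blob_format=DelimitedJsonDialect(delimiter="\n"),
    output_format=DelimitedJsonDialect(delimiter=";"),
)
result_bytes = reader.readall()  # whole result at once...
# ...or stream it record by record: list(reader.records())
```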
diff --git a/sdk/storage/azure-storage-blob/tests/test_retry.py b/sdk/storage/azure-storage-blob/tests/test_retry.py
index debf234e651b..4abc8f64542b 100644
--- a/sdk/storage/azure-storage-blob/tests/test_retry.py
+++ b/sdk/storage/azure-storage-blob/tests/test_retry.py
@@ -12,17 +12,11 @@
     ClientAuthenticationError,
     HttpResponseError,
     ResourceExistsError,
-    ServiceResponseError
+    ServiceResponseError,
 )
 from azure.core.pipeline.transport import RequestsTransport
 from azure.storage.blob._shared.authentication import AzureSigningError
-from azure.storage.blob import (
-    BlobClient,
-    BlobServiceClient,
-    ExponentialRetry,
-    LinearRetry,
-    LocationMode
-)
+from azure.storage.blob import BlobClient, BlobServiceClient, ExponentialRetry, LinearRetry, LocationMode
 from requests import Response
 from requests.exceptions import ContentDecodingError, ChunkedEncodingError, ReadTimeout
@@ -33,6 +27,7 @@
 class TimeoutRequestsTransport(RequestsTransport):
     """Transport to test read timeout"""
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.count = 0
@@ -61,7 +56,7 @@ def test_retry_on_server_error(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key)

         # Force the create call to 'timeout' with a 408
@@ -85,10 +80,11 @@ def test_retry_on_timeout(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=2)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -110,17 +106,18 @@ def test_retry_callback_and_retry_context(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = LinearRetry(backoff=1)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status

         def assert_exception_is_present_on_retry_context(**kwargs):
-            assert kwargs.get('response') is not None
-            assert kwargs['response'].status_code == 408
+            assert kwargs.get("response") is not None
+            assert kwargs["response"].status_code == 408

         # Act
         try:
@@ -128,8 +125,8 @@ def assert_exception_is_present_on_retry_context(**kwargs):
             # The retry will then get a 409 and return false.
             with pytest.raises(ResourceExistsError):
                 service.create_container(
-                    container_name, raw_response_hook=callback,
-                    retry_hook=assert_exception_is_present_on_retry_context)
+                    container_name, raw_response_hook=callback, retry_hook=assert_exception_is_present_on_retry_context
+                )
         finally:
             service.delete_container(container_name)
@@ -140,12 +137,12 @@ def test_retry_on_socket_timeout(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
-        blob_name = self.get_resource_name('blob')
+        container_name = self.get_resource_name("utcontainer")
+        blob_name = self.get_resource_name("blob")
         # Upload a blob that can be downloaded to test read timeout
         service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key)
         container = service.create_container(container_name)
-        container.upload_blob(blob_name, b'Hello World', overwrite=True)
+        container.upload_blob(blob_name, b"Hello World", overwrite=True)

         retry = LinearRetry(backoff=1, random_jitter_range=1)
         timeout_transport = TimeoutRequestsTransport()
@@ -154,7 +151,8 @@ def test_retry_on_socket_timeout(self, **kwargs):
             storage_account_name,
             storage_account_key,
             retry_policy=retry,
-            transport=timeout_transport)
+            transport=timeout_transport,
+        )
         blob = timeout_service.get_blob_client(container_name, blob_name)

         # Act
@@ -175,10 +173,10 @@ def test_no_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_total=0)
-
+            BlobServiceClient, storage_account_name, storage_account_key, retry_total=0
+        )
         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -188,7 +186,7 @@ def test_no_retry(self, **kwargs):
             with pytest.raises(HttpResponseError) as error:
                 service.create_container(container_name, raw_response_hook=callback)
             assert error.value.status_code == 408
-            assert error.value.reason == 'Created'
+            assert error.value.reason == "Created"
         finally:
             service.delete_container(container_name)
@@ -200,10 +198,11 @@ def test_linear_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = LinearRetry(backoff=1)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -226,10 +225,11 @@ def test_exponential_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=3, retry_total=3)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         try:
             container = service.create_container(container_name)
@@ -242,7 +242,7 @@ def test_exponential_retry(self, **kwargs):
             container.get_container_properties(raw_response_hook=callback.override_status)

             # Assert the response was called the right number of times (1 initial request + 3 retries)
-            assert callback.count == 1+3
+            assert callback.count == 1 + 3
         finally:
             # Clean up
             service.delete_container(container_name)
@@ -259,28 +259,28 @@ def test_exponential_retry_interval(self, **kwargs):
         for i in range(10):
             # Act
-            context_stub['count'] = 0
+            context_stub["count"] = 0
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 1
             assert 0 <= backoff <= 4

             # Act
-            context_stub['count'] = 1
+            context_stub["count"] = 1
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 4(1+3^1)
             assert 1 <= backoff <= 7

             # Act
-            context_stub['count'] = 2
+            context_stub["count"] = 2
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 10(1+3^2)
             assert 7 <= backoff <= 13

             # Act
-            context_stub['count'] = 3
+            context_stub["count"] = 3
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 28(1+3^3)
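The +/- 3 windows asserted in that hunk encode the shape of ExponentialRetry's backoff: the initial backoff, plus increment_base raised to the retry count for retries after the first, with a random jitter range of 3 seconds and a floor of zero. A sketch of that arithmetic under the test's apparent settings (initial_backoff=1, increment_base=3) -- an illustration of the expected intervals, not the library source:

```python
import random

def sketch_backoff_seconds(count, initial_backoff=1, increment_base=3, jitter=3):
    # count=0 -> 1, count=1 -> 1+3=4, count=2 -> 1+9=10, count=3 -> 1+27=28
    base = initial_backoff + (0 if count == 0 else increment_base**count)
    low = base - jitter if base > jitter else 0  # never sleep a negative interval
    return random.uniform(low, base + jitter)

# Matching the assertions above:
# count=0 -> [0, 4], count=1 -> [1, 7], count=2 -> [7, 13], count=3 -> [25, 31]
```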
""" + # Arrange # Fail the first request and set the retry policy to retry to secondary # The given test account must be GRS class MockTransport(RequestsTransport): CALL_NUMBER = 1 ENABLE = False + def send(self, request, **kwargs): if MockTransport.ENABLE: if MockTransport.CALL_NUMBER == 2: - if request.method != 'PUT': - assert '-secondary' in request.url + if request.method != "PUT": + assert "-secondary" in request.url # Here's our hack # Replace with primary so the test works even # if secondary is not ready - request.url = request.url.replace('-secondary', '') + request.url = request.url.replace("-secondary", "") response = super(MockTransport, self).send(request, **kwargs) @@ -416,8 +420,7 @@ def send(self, request, **kwargs): retry = ExponentialRetry(retry_to_secondary=True, initial_backoff=1, increment_base=2) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry, - transport=MockTransport() + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry, transport=MockTransport() ) # Act @@ -435,9 +438,10 @@ def put_retry_callback(retry_count=None, location_mode=None, **kwargs): assert LocationMode.PRIMARY == location_mode else: pytest.fail("This test is not supposed to retry more than once") + put_retry_callback.called = False - container = service.get_container_client('containername') + container = service.get_container_client("containername") created = container.create_container(retry_hook=put_retry_callback) assert put_retry_callback.called @@ -450,13 +454,13 @@ def retry_callback(retry_count=None, location_mode=None, **kwargs): assert LocationMode.SECONDARY == location_mode else: pytest.fail("This test is not supposed to retry more than once") + retry_callback.called = False # Try list MockTransport.CALL_NUMBER = 1 retry_callback.called = False - containers = service.list_containers( - results_per_page=1, retry_hook=retry_callback) + containers = service.list_containers(results_per_page=1, retry_hook=retry_callback) next(containers) assert retry_callback.called @@ -473,10 +477,11 @@ def test_invalid_account_key(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") retry = ExponentialRetry(initial_backoff=1, increment_base=3, retry_total=3) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry) + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry + ) service.credential.account_name = "dummy_account_name" service.credential.account_key = "dummy_account_key" @@ -507,7 +512,7 @@ def count_wrapper(counter, func): class MyClass: def hello(self): pass - + obj = MyClass() counter = [0] obj.hello() @@ -517,10 +522,12 @@ def hello(self): print(counter[0]) # 2 ``` """ + @wraps(func) def inner(*args, **kwargs): counter[0] += 1 return func(*args, **kwargs) + return inner @BlobPreparer() @@ -531,9 +538,8 @@ def test_streaming_retry(self, **kwargs): """Test that retry mechanisms are working when streaming data.""" # Should check that multiple requests went through the pipeline - container_name = self.get_resource_name('utcontainer') - service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key) + container_name = self.get_resource_name("utcontainer") + service = self._create_storage_service(BlobServiceClient, 
storage_account_name, storage_account_key) container = service.get_container_client(container_name) container.create_container() assert container.exists() @@ -559,11 +565,7 @@ def test_invalid_storage_account_key(self, **kwargs): # Arrange blob_client = self._create_storage_service( - BlobClient, - storage_account_name, - storage_account_key, - container_name="foo", - blob_name="bar" + BlobClient, storage_account_name, storage_account_key, container_name="foo", blob_name="bar" ) retry_counter = RetryCounter() @@ -574,8 +576,10 @@ def test_invalid_storage_account_key(self, **kwargs): blob_client.get_blob_properties(retry_hook=retry_callback) # Assert - assert ("This is likely due to an invalid shared key. Please check your shared key and try again." in - e.value.message) + assert ( + "This is likely due to an invalid shared key. Please check your shared key and try again." + in e.value.message + ) assert retry_counter.count == 0 # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_retry_async.py b/sdk/storage/azure-storage-blob/tests/test_retry_async.py index 8cf71b3393aa..1955befd2662 100644 --- a/sdk/storage/azure-storage-blob/tests/test_retry_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_retry_async.py @@ -16,7 +16,7 @@ IncompleteReadError, HttpResponseError, ResourceExistsError, - ServiceResponseError + ServiceResponseError, ) from azure.core.pipeline.transport import AioHttpTransport from azure.storage.blob import LocationMode @@ -32,6 +32,7 @@ class TimeoutAioHttpTransport(AioHttpTransport): """Transport to test read timeout""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.count = 0 @@ -60,7 +61,7 @@ async def test_retry_on_server_error(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key) # Force the create call to 'timeout' with a 408 @@ -84,10 +85,11 @@ async def test_retry_on_timeout(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") retry = ExponentialRetry(initial_backoff=1, increment_base=2) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry) + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry + ) callback = ResponseCallback(status=201, new_status=408).override_status @@ -109,17 +111,18 @@ async def test_retry_callback_and_retry_context(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - container_name = self.get_resource_name('utcontainer') + container_name = self.get_resource_name("utcontainer") retry = LinearRetry(backoff=1) service = self._create_storage_service( - BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry) + BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry + ) # Force the create call to 'timeout' with a 408 callback = ResponseCallback(status=201, new_status=408).override_status def assert_exception_is_present_on_retry_context(**kwargs): - assert kwargs.get('response') is not None - assert kwargs['response'].status_code == 408 + assert kwargs.get("response") is not 
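The async file that follows mirrors the sync suite hunk for hunk, and both pivot on the same failover contract: with retry_to_secondary=True, a retried read is reissued against the "-secondary" endpoint and the retry_hook observes LocationMode.SECONDARY, while writes (PUT) stay on the primary. Stripped of the mock transport the tests use to force a failure, that contract looks like this sketch (placeholder URL and credential; the hook only fires if a request is actually retried):

```python
from azure.storage.blob import BlobServiceClient, ExponentialRetry, LocationMode

retry = ExponentialRetry(retry_to_secondary=True, initial_backoff=1, increment_base=2)
service = BlobServiceClient(
    "https://<account>.blob.core.windows.net", credential="<account-key>", retry_policy=retry
)

def retry_callback(retry_count=None, location_mode=None, **kwargs):
    # On the first retry of a read, the pipeline should have failed over.
    if retry_count == 1:
        assert location_mode == LocationMode.SECONDARY

containers = service.list_containers(results_per_page=1, retry_hook=retry_callback)
next(containers)  # issue the first page request
```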
diff --git a/sdk/storage/azure-storage-blob/tests/test_retry_async.py b/sdk/storage/azure-storage-blob/tests/test_retry_async.py
index 8cf71b3393aa..1955befd2662 100644
--- a/sdk/storage/azure-storage-blob/tests/test_retry_async.py
+++ b/sdk/storage/azure-storage-blob/tests/test_retry_async.py
@@ -16,7 +16,7 @@
     IncompleteReadError,
     HttpResponseError,
     ResourceExistsError,
-    ServiceResponseError
+    ServiceResponseError,
 )
 from azure.core.pipeline.transport import AioHttpTransport
 from azure.storage.blob import LocationMode
@@ -32,6 +32,7 @@
 class TimeoutAioHttpTransport(AioHttpTransport):
     """Transport to test read timeout"""
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.count = 0
@@ -60,7 +61,7 @@ async def test_retry_on_server_error(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key)

         # Force the create call to 'timeout' with a 408
@@ -84,10 +85,11 @@ async def test_retry_on_timeout(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=2)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -109,17 +111,18 @@ async def test_retry_callback_and_retry_context(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = LinearRetry(backoff=1)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status

         def assert_exception_is_present_on_retry_context(**kwargs):
-            assert kwargs.get('response') is not None
-            assert kwargs['response'].status_code == 408
+            assert kwargs.get("response") is not None
+            assert kwargs["response"].status_code == 408

         # Act
         try:
@@ -127,8 +130,8 @@ def assert_exception_is_present_on_retry_context(**kwargs):
             # The retry will then get a 409 and return false.
             with pytest.raises(ResourceExistsError):
                 await service.create_container(
-                    container_name, raw_response_hook=callback,
-                    retry_hook=assert_exception_is_present_on_retry_context)
+                    container_name, raw_response_hook=callback, retry_hook=assert_exception_is_present_on_retry_context
+                )
         finally:
             await service.delete_container(container_name)
@@ -139,12 +142,12 @@ async def test_retry_on_socket_timeout(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
-        blob_name = self.get_resource_name('blob')
+        container_name = self.get_resource_name("utcontainer")
+        blob_name = self.get_resource_name("blob")
         # Upload a blob that can be downloaded to test read timeout
         service = self._create_storage_service(BlobServiceClient, storage_account_name, storage_account_key)
         container = await service.create_container(container_name)
-        await container.upload_blob(blob_name, b'Hello World', overwrite=True)
+        await container.upload_blob(blob_name, b"Hello World", overwrite=True)

         retry = LinearRetry(backoff=1, random_jitter_range=1)
         timeout_transport = TimeoutAioHttpTransport()
@@ -153,7 +156,8 @@ async def test_retry_on_socket_timeout(self, **kwargs):
             storage_account_name,
             storage_account_key,
             retry_policy=retry,
-            transport=timeout_transport)
+            transport=timeout_transport,
+        )
         blob = timeout_service.get_blob_client(container_name, blob_name)

         # Act
@@ -176,10 +180,10 @@ async def test_no_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_total=0)
-
+            BlobServiceClient, storage_account_name, storage_account_key, retry_total=0
+        )
         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -189,7 +193,7 @@ async def test_no_retry(self, **kwargs):
             with pytest.raises(HttpResponseError) as error:
                 await service.create_container(container_name, raw_response_hook=callback)
             assert error.value.status_code == 408
-            assert error.value.reason == 'Created'
+            assert error.value.reason == "Created"
         finally:
             await service.delete_container(container_name)
@@ -201,10 +205,11 @@ async def test_linear_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = LinearRetry(backoff=1)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         # Force the create call to 'timeout' with a 408
         callback = ResponseCallback(status=201, new_status=408).override_status
@@ -227,10 +232,11 @@ async def test_exponential_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=3, retry_total=3)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         try:
             container = await service.create_container(container_name)
@@ -243,7 +249,7 @@ async def test_exponential_retry(self, **kwargs):
             await container.get_container_properties(raw_response_hook=callback.override_status)

             # Assert the response was called the right number of times (1 initial request + 3 retries)
-            assert callback.count == 1+3
+            assert callback.count == 1 + 3
         finally:
             # Clean up
             await service.delete_container(container_name)
@@ -260,28 +266,28 @@ async def test_exponential_retry_interval(self, **kwargs):
         for i in range(10):
             # Act
-            context_stub['count'] = 0
+            context_stub["count"] = 0
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 1
             assert 0 <= backoff <= 4

             # Act
-            context_stub['count'] = 1
+            context_stub["count"] = 1
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 4(1+3^1)
             assert 1 <= backoff <= 7

             # Act
-            context_stub['count'] = 2
+            context_stub["count"] = 2
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 10(1+3^2)
             assert 7 <= backoff <= 13

             # Act
-            context_stub['count'] = 3
+            context_stub["count"] = 3
             backoff = retry_policy.get_backoff_time(context_stub)

             # Assert backoff interval is within +/- 3 of 28(1+3^3)
@@ -325,10 +331,11 @@ async def test_invalid_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=2)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         # Force the create call to fail by pretending it's a teapot
         callback = ResponseCallback(status=201, new_status=418).override_status
@@ -338,7 +345,7 @@ async def test_invalid_retry(self, **kwargs):
             with pytest.raises(HttpResponseError) as error:
                 await service.create_container(container_name, raw_response_hook=callback)
             assert error.value.status_code == 418
-            assert error.value.reason == 'Created'
+            assert error.value.reason == "Created"
         finally:
             await service.delete_container(container_name)
@@ -349,17 +356,18 @@ async def test_retry_with_deserialization(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('retry')
+        container_name = self.get_resource_name("retry")
         retry = ExponentialRetry(initial_backoff=1, increment_base=2)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )

         try:
             created = await service.create_container(container_name)

             # Act
             callback = ResponseCallback(status=200, new_status=408).override_first_status
-            containers = service.list_containers(name_starts_with='retry', raw_response_hook=callback)
+            containers = service.list_containers(name_starts_with="retry", raw_response_hook=callback)

             # Assert
             listed = []
@@ -387,21 +395,23 @@ async def test_retry_secondary(self, **kwargs):
         Might be changed to live only as loooooong test with a polling on the current geo-replication status.
         """
+
         # Arrange
         # Fail the first request and set the retry policy to retry to secondary
         # The given test account must be GRS
         class MockTransport(AioHttpTransport):
             CALL_NUMBER = 1
             ENABLE = False
+
             async def send(self, request, **kwargs):
                 if MockTransport.ENABLE:
                     if MockTransport.CALL_NUMBER == 2:
-                        if request.method != 'PUT':
-                            assert '-secondary' in request.url
+                        if request.method != "PUT":
+                            assert "-secondary" in request.url
                         # Here's our hack
                         # Replace with primary so the test works even
                         # if secondary is not ready
-                        request.url = request.url.replace('-secondary', '')
+                        request.url = request.url.replace("-secondary", "")

                 response = await super(MockTransport, self).send(request, **kwargs)
@@ -419,8 +429,8 @@ async def send(self, request, **kwargs):
         retry = ExponentialRetry(retry_to_secondary=True, initial_backoff=1, increment_base=2)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry,
-            transport=MockTransport())
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry, transport=MockTransport()
+        )

         # Act
         MockTransport.ENABLE = True
@@ -437,9 +447,10 @@ def put_retry_callback(retry_count=None, location_mode=None, **kwargs):
                 assert LocationMode.PRIMARY == location_mode
             else:
                 pytest.fail("This test is not supposed to retry more than once")
+
         put_retry_callback.called = False

-        container = service.get_container_client('containername')
+        container = service.get_container_client("containername")
         created = await container.create_container(retry_hook=put_retry_callback)
         assert put_retry_callback.called
@@ -452,13 +463,13 @@ def retry_callback(retry_count=None, location_mode=None, **kwargs):
                 assert LocationMode.SECONDARY == location_mode
             else:
                 pytest.fail("This test is not supposed to retry more than once")
+
         retry_callback.called = False

         # Try list
         MockTransport.CALL_NUMBER = 1
         retry_callback.called = False
-        containers = service.list_containers(
-            results_per_page=1, retry_hook=retry_callback)
+        containers = service.list_containers(results_per_page=1, retry_hook=retry_callback)
         await containers.__anext__()
         assert retry_callback.called
@@ -475,10 +486,11 @@ async def test_invalid_account_key(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")

         # Arrange
-        container_name = self.get_resource_name('utcontainer')
+        container_name = self.get_resource_name("utcontainer")
         retry = ExponentialRetry(initial_backoff=1, increment_base=3, retry_total=3)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )
         service.credential.account_name = "dummy_account_name"
         service.credential.account_key = "dummy_account_key"
@@ -500,6 +512,7 @@ def _count_wrapper(counter, func):
     def wrapper(*args, **kwargs):
         counter[0] += 1
         return func(*args, **kwargs)
+
     return wrapper

     @pytest.mark.live_test_only
@@ -509,11 +522,12 @@ async def test_streaming_retry(self, **kwargs):
         storage_account_key = kwargs.pop("storage_account_key")
         """Test that retry mechanisms are working when streaming data."""

-        container_name = self.get_resource_name('utcontainer')
-        retry = LinearRetry(backoff = 0.1, random_jitter_range=0)
+        container_name = self.get_resource_name("utcontainer")
+        retry = LinearRetry(backoff=0.1, random_jitter_range=0)
         service = self._create_storage_service(
-            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry)
+            BlobServiceClient, storage_account_name, storage_account_key, retry_policy=retry
+        )
         container = service.get_container_client(container_name)
         await container.create_container()
         assert await container.exists()
@@ -538,11 +552,7 @@ async def test_invalid_storage_account_key(self, **kwargs):

         # Arrange
         blob_client = self._create_storage_service(
-            BlobClient,
-            storage_account_name,
-            storage_account_key,
-            container_name="foo",
-            blob_name="bar"
+            BlobClient, storage_account_name, storage_account_key, container_name="foo", blob_name="bar"
         )

         retry_counter = RetryCounter()
@@ -553,8 +563,11 @@ async def test_invalid_storage_account_key(self, **kwargs):
             await blob_client.get_blob_properties(retry_hook=retry_callback)

         # Assert
-        assert ("This is likely due to an invalid shared key. Please check your shared key and try again." in
-                e.value.message)
+        assert (
+            "This is likely due to an invalid shared key. Please check your shared key and try again."
+            in e.value.message
+        )
         assert retry_counter.count == 0
+

 # ------------------------------------------------------------------------------
diff --git a/sdk/storage/azure-storage-blob/tests/test_upload_chunking.py b/sdk/storage/azure-storage-blob/tests/test_upload_chunking.py
index 310e980367f8..cb6896ce2f10 100644
--- a/sdk/storage/azure-storage-blob/tests/test_upload_chunking.py
+++ b/sdk/storage/azure-storage-blob/tests/test_upload_chunking.py
@@ -23,7 +23,7 @@ def test_sub_stream_with_length_larger_than_buffer(self, **kwargs):

         # assuming the max size of the buffer is 4MB, this test needs to be updated if that has changed
         # the block size is 6MB for this test
-        expected_data = data[0: 6 * 1024 * 1024]
+        expected_data = data[0 : 6 * 1024 * 1024]
         wrapped_stream = BytesIO(data)  # simulate stream given by user
         lockObj = Lock()  # simulate multi-threaded environment
         substream = SubStream(wrapped_stream, stream_begin_index=0, length=6 * 1024 * 1024, lockObj=lockObj)
@@ -75,7 +75,7 @@ def test_sub_stream_with_length_equal_to_buffer(self, **kwargs):

         # assuming the max size of the buffer is 4MB, this test needs to be updated if that has changed
         # the block size is 2MB for this test
-        expected_data = data[0: 2 * 1024 * 1024]
+        expected_data = data[0 : 2 * 1024 * 1024]
         wrapped_stream = BytesIO(expected_data)  # simulate stream given by user
         lockObj = Lock()  # simulate multi-threaded environment
         substream = SubStream(wrapped_stream, stream_begin_index=0, length=2 * 1024 * 1024, lockObj=lockObj)
diff --git a/sdk/storage/azure-storage-blob/tsp-location.yaml b/sdk/storage/azure-storage-blob/tsp-location.yaml
new file mode 100644
index 000000000000..b070631da52d
--- /dev/null
+++ b/sdk/storage/azure-storage-blob/tsp-location.yaml
@@ -0,0 +1,4 @@
+directory: specification/storage/Microsoft.BlobStroage
+commit: 9e20a84f2bd17e28282cb7fc4f32c9db2dbe3b3f
+repo: test-repo-billy/azure-rest-api-specs
+additionalDirectories:
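For context on the SubStream fixture exercised in test_upload_chunking.py: it windows one caller-supplied stream so that parallel block uploads can each read their own slice under a shared lock. A minimal sketch of that setup follows (SubStream lives in an internal _shared.uploads module, so the import path is an assumption here, and the read-chunk size is arbitrary):

```python
from io import BytesIO
from threading import Lock

from azure.storage.blob._shared.uploads import SubStream  # internal module; path assumed

data = b"x" * (8 * 1024 * 1024)
wrapped_stream = BytesIO(data)  # simulate the stream given by the user
lock = Lock()                   # shared by readers in a multi-threaded upload

# Expose bytes [0, 6 MiB) of the wrapped stream as an independent file-like object.
substream = SubStream(wrapped_stream, stream_begin_index=0, length=6 * 1024 * 1024, lockObj=lock)

read_back = b""
while True:
    chunk = substream.read(1024 * 1024)  # read the slice back in 1 MiB pieces
    if not chunk:
        break
    read_back += chunk
assert read_back == data[0 : 6 * 1024 * 1024]
```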